+8
.env.example
+8
.env.example
+112
CHANGELOG.md
+112
CHANGELOG.md
···
1
+
# Changelog
2
+
3
+
All notable changes to this project will be documented in this file.
4
+
5
+
The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.1.0/).
6
+
7
+
## [Unreleased]
8
+
9
+
## [0.6.0] - 2026-01-09
10
+
11
+
### Added
12
+
13
+
- **Profile card on OAuth consent page** showing authorizing user's identity
14
+
- Displays avatar, display name, and handle from Bluesky public API
15
+
- Fetches profile client-side using `login_hint` parameter
16
+
- Graceful degradation if fetch fails (shows handle only)
17
+
18
+
## [0.5.0] - 2026-01-08
19
+
20
+
### Added
21
+
22
+
- **Direct OAuth authorization** without requiring Pushed Authorization Requests (PAR)
23
+
- `/oauth/authorize` now accepts direct query parameters (client_id, redirect_uri, code_challenge, etc.)
24
+
- Creates authorization request record on-the-fly, same as PAR flow
25
+
- DPoP binding deferred to token exchange time for direct auth flows
26
+
- Matches official AT Protocol PDS behavior
27
+
28
+
### Changed
29
+
30
+
- AS metadata: `require_pushed_authorization_requests` now `false`
31
+
- Extracted `validateAuthorizationParameters()` helper shared between PAR and direct auth
32
+
33
+
## [0.4.0] - 2026-01-08
34
+
35
+
### Added
36
+
37
+
- **Foreign DID proxying** via `atproto-proxy` header
38
+
- `parseAtprotoProxyHeader()` parses `did:web:api.bsky.app#bsky_appview` format
39
+
- `getKnownServiceUrl()` maps known service DIDs to URLs
40
+
- `proxyToService()` generic proxy utility with header forwarding
41
+
- Repo endpoints (getRecord, listRecords, describeRepo) support explicit proxying
42
+
- Returns appropriate errors for malformed headers or unknown services
43
+
- Unit tests for proxy utilities
44
+
- E2E tests for foreign DID proxying behavior
45
+
46
+
### Changed
47
+
48
+
- Refactored `handleAppViewProxy` to use shared `proxyToService` utility
49
+
50
+
## [0.3.0] - 2026-01-08
51
+
52
+
### Added
53
+
54
+
- **Granular OAuth scope enforcement** on repo and blob endpoints
55
+
- `parseRepoScope()` parses `repo:collection?action=create&action=update` format
56
+
- `parseBlobScope()` parses `blob:image/*` format with MIME wildcards
57
+
- `ScopePermissions` class for checking repo/blob permissions
58
+
- Enforced on createRecord, putRecord, deleteRecord, applyWrites, uploadBlob
59
+
- **Consent page permissions table** displaying scopes in human-readable format
60
+
- Identity-only: "wants to uniquely identify you" message
61
+
- Granular scopes: Table with Collection + Create/Update/Delete columns
62
+
- Full access: Warning banner for `transition:generic`
63
+
- `parseScopesForDisplay()` helper for consent page rendering
64
+
- E2E tests for scope enforcement and consent page display
65
+
66
+
## [0.2.0] - 2026-01-07
67
+
68
+
### Added
69
+
70
+
- **OAuth 2.0 authorization server** with full AT Protocol support
71
+
- Discovery endpoints (AS metadata, protected resource, JWKS)
72
+
- Pushed Authorization Requests (PAR)
73
+
- Authorization endpoint with dark-themed consent UI
74
+
- Token endpoint (authorization_code + refresh_token grants)
75
+
- Token revocation (RFC 7009)
76
+
- DPoP proof validation and token binding
77
+
- PKCE with S256 code challenge
78
+
- Client metadata fetching and validation
79
+
- Loopback client support for development
80
+
- DPoP JTI tracking to prevent replay attacks
81
+
- Comprehensive OAuth e2e tests
82
+
83
+
### Changed
84
+
85
+
- **BREAKING:** Normalized SQL schema to snake_case convention
86
+
- Tables: `blob` → `blobs`, `record_blob` → `record_blobs`
87
+
- Columns: `mimeType` → `mime_type`, `createdAt` → `created_at`, `blobCid` → `blob_cid`, `recordUri` → `record_uri`
88
+
- Existing Durable Objects require storage reset
89
+
- Consolidated error responses to use `errorResponse` helper
90
+
- Moved OAuth types to TYPES & CONSTANTS section
91
+
92
+
## [0.1.0] - 2026-01-07
93
+
94
+
Initial experimental release.
95
+
96
+
### Added
97
+
98
+
- **Repo operations:** createRecord, getRecord, putRecord, deleteRecord, applyWrites, listRecords
99
+
- **Sync endpoints:** getRepo (CAR export), subscribeRepos (WebSocket firehose), getLatestCommit
100
+
- **Authentication:** createSession, getSession, refreshSession with JWT tokens
101
+
- **Blob storage:** uploadBlob, getBlob, listBlobs with R2 backend
102
+
- MIME type sniffing (JPEG, PNG, GIF, WebP, MP4, AVIF, HEIC)
103
+
- Automatic orphaned blob cleanup via DO alarms
104
+
- Blob-record association tracking
105
+
- **Identity:** Handle resolution, PLC directory registration
106
+
- **Federation:** Relay notification (requestCrawl), AppView proxy for app.bsky.* endpoints
107
+
- **Infrastructure:**
108
+
- Merkle Search Tree (MST) for repo structure
109
+
- DAG-CBOR encoding with CID generation
110
+
- P-256 ECDSA signing via Web Crypto
111
+
- TypeScript checking via JSDoc annotations
112
+
- Setup script for key generation and PLC registration
+21
LICENSE
+21
LICENSE
···
1
+
MIT License
2
+
3
+
Copyright (c) 2025 Chad Miller
4
+
5
+
Permission is hereby granted, free of charge, to any person obtaining a copy
6
+
of this software and associated documentation files (the "Software"), to deal
7
+
in the Software without restriction, including without limitation the rights
8
+
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
9
+
copies of the Software, and to permit persons to whom the Software is
10
+
furnished to do so, subject to the following conditions:
11
+
12
+
The above copyright notice and this permission notice shall be included in all
13
+
copies or substantial portions of the Software.
14
+
15
+
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
16
+
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
17
+
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
18
+
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
19
+
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
20
+
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
21
+
SOFTWARE.
+95
README.md
+95
README.md
···
1
+
# pds.js
2
+
3
+
A zero-dependency AT Protocol Personal Data Server written in JavaScript, running on Cloudflare Workers with Durable Objects. Let's see how far we can get with just Web APIs.
4
+
5
+
> ⚠️ **Work in progress** - This is experimental. You probably shouldn't use this yet.
6
+
7
+
## Status
8
+
9
+
- [x] Repo operations (createRecord, getRecord, putRecord, deleteRecord, applyWrites, listRecords)
10
+
- [x] Sync endpoints (getRepo, getRecord, subscribeRepos, listRepos, getLatestCommit)
11
+
- [x] Auth (createSession, getSession, refreshSession)
12
+
- [x] Handle resolution (resolveHandle)
13
+
- [x] AppView proxy (app.bsky.* forwarding with service auth)
14
+
- [x] Relay notification (requestCrawl)
15
+
- [x] Single or multi-user (each DID gets isolated storage, no self-service signup yet)
16
+
- [x] Blob storage (uploadBlob, getBlob, listBlobs)
17
+
- [x] OAuth 2.0 (PAR, authorization code + PKCE, DPoP-bound tokens, refresh, revoke)
18
+
- [ ] deleteSession (logout)
19
+
- [ ] updateHandle
20
+
- [ ] importRepo
21
+
- [ ] Account management (createAccount, deleteAccount)
22
+
- [ ] Email verification
23
+
- [ ] Invite codes
24
+
- [ ] Admin/moderation
25
+
- [ ] Rate limiting
26
+
27
+
See [endpoint comparison](docs/endpoint-comparison.md) for detailed coverage vs the official atproto PDS.
28
+
29
+
## Prerequisites
30
+
31
+
- Node.js 18+
32
+
33
+
## Quick Start
34
+
35
+
```bash
36
+
npm install
37
+
38
+
# Create local dev config
39
+
cp .env.example .dev.vars
40
+
# Edit .dev.vars with your values
41
+
42
+
# Run locally
43
+
npm run dev
44
+
```
45
+
46
+
## Configuration
47
+
48
+
For local development, create `.dev.vars`:
49
+
50
+
```
51
+
PDS_PASSWORD=your-password # Used for legacy auth and OAuth consent
52
+
JWT_SECRET=your-secret
53
+
RELAY_HOST=https://bsky.network # optional
54
+
```
55
+
56
+
For production, use Cloudflare secrets:
57
+
58
+
```bash
59
+
wrangler secret put PDS_PASSWORD
60
+
wrangler secret put JWT_SECRET
61
+
wrangler secret put RELAY_HOST # optional
62
+
```
63
+
64
+
### Blob Storage
65
+
66
+
Blobs (images, videos) are stored in Cloudflare R2. Create the bucket before deploying:
67
+
68
+
```bash
69
+
npx wrangler r2 bucket create pds-blobs
70
+
```
71
+
72
+
The binding is already configured in `wrangler.toml`. Supported formats: JPEG, PNG, GIF, WebP, MP4, AVIF, HEIC. Max size: 50MB. Orphaned blobs are automatically cleaned up after 24 hours.
73
+
74
+
## Testing
75
+
76
+
```bash
77
+
npm test # Unit tests
78
+
npm run test:e2e # E2E tests (starts local server)
79
+
```
80
+
81
+
## Deploy
82
+
83
+
```bash
84
+
wrangler deploy
85
+
```
86
+
87
+
## Initialize
88
+
89
+
After deployment, run the setup script to register with PLC and initialize:
90
+
91
+
```bash
92
+
npm run setup -- --pds https://your-pds.workers.dev
93
+
```
94
+
95
+
This generates keys, registers your DID with the PLC directory, initializes the PDS, and saves credentials. Handle defaults to the worker hostname.
+24
biome.json
+24
biome.json
···
1
+
{
2
+
"$schema": "https://biomejs.dev/schemas/2.3.11/schema.json",
3
+
"vcs": {
4
+
"enabled": true,
5
+
"clientKind": "git",
6
+
"useIgnoreFile": true
7
+
},
8
+
"formatter": {
9
+
"enabled": true,
10
+
"indentStyle": "space",
11
+
"indentWidth": 2
12
+
},
13
+
"linter": {
14
+
"enabled": true,
15
+
"rules": {
16
+
"recommended": true
17
+
}
18
+
},
19
+
"javascript": {
20
+
"formatter": {
21
+
"quoteStyle": "single"
22
+
}
23
+
}
24
+
}
+31
docker-compose.yml
+31
docker-compose.yml
···
1
+
services:
2
+
plc:
3
+
build:
4
+
context: https://github.com/did-method-plc/did-method-plc.git
5
+
dockerfile: packages/server/Dockerfile
6
+
ports:
7
+
- "2582:2582"
8
+
environment:
9
+
- DATABASE_URL=postgres://plc:plc@postgres:5432/plc
10
+
- PORT=2582
11
+
command: ["dumb-init", "node", "--enable-source-maps", "../dist/bin.js"]
12
+
depends_on:
13
+
postgres:
14
+
condition: service_healthy
15
+
16
+
postgres:
17
+
image: postgres:16-alpine
18
+
environment:
19
+
- POSTGRES_USER=plc
20
+
- POSTGRES_PASSWORD=plc
21
+
- POSTGRES_DB=plc
22
+
volumes:
23
+
- plc_data:/var/lib/postgresql/data
24
+
healthcheck:
25
+
test: ["CMD-SHELL", "pg_isready -U plc"]
26
+
interval: 2s
27
+
timeout: 5s
28
+
retries: 10
29
+
30
+
volumes:
31
+
plc_data:
+190
docs/endpoint-comparison.md
+190
docs/endpoint-comparison.md
···
1
+
# PDS Endpoint Comparison: pds.js vs atproto/packages/pds
2
+
3
+
Comparison of endpoints and parameters between this implementation and the official AT Protocol PDS.
4
+
5
+
---
6
+
7
+
## Endpoints Missing from pds.js
8
+
9
+
### com.atproto.admin.* (entire namespace missing)
10
+
11
+
| Endpoint | Params |
12
+
|----------|--------|
13
+
| deleteAccount | did |
14
+
| disableAccountInvites | account, note |
15
+
| disableInviteCodes | accounts, codes |
16
+
| enableAccountInvites | account, note |
17
+
| getAccountInfo | did |
18
+
| getAccountInfos | dids |
19
+
| getInviteCodes | cursor, limit, sort |
20
+
| getSubjectStatus | blob, did, uri |
21
+
| searchAccounts | cursor, email, limit |
22
+
| sendEmail | comment, content, recipientDid, senderDid, subject |
23
+
| updateAccountEmail | account, email |
24
+
| updateAccountHandle | did, handle |
25
+
| updateAccountPassword | did, password |
26
+
| updateAccountSigningKey | did, signingKey |
27
+
| updateSubjectStatus | deactivated, subject, takedown |
28
+
29
+
### com.atproto.identity.* (mostly missing)
30
+
31
+
| Endpoint | Params | Notes |
32
+
|----------|--------|-------|
33
+
| getRecommendedDidCredentials | (none) | |
34
+
| refreshIdentity | identifier | |
35
+
| requestPlcOperationSignature | (none) | |
36
+
| resolveDid | did | |
37
+
| resolveIdentity | identifier | |
38
+
| signPlcOperation | alsoKnownAs, rotationKeys, services, token, verificationMethods | |
39
+
| submitPlcOperation | operation | |
40
+
| updateHandle | handle | |
41
+
42
+
*pds.js only implements: resolveHandle*
43
+
44
+
### com.atproto.server.* (many missing)
45
+
46
+
| Endpoint | Params |
47
+
|----------|--------|
48
+
| activateAccount | (none) |
49
+
| checkAccountStatus | (none) |
50
+
| confirmEmail | email, token |
51
+
| createAccount | did, email, handle, inviteCode, password, plcOp, recoveryKey, verificationCode, verificationPhone |
52
+
| createAppPassword | name, privileged |
53
+
| createInviteCode | forAccount, useCount |
54
+
| createInviteCodes | codeCount, forAccounts, useCount |
55
+
| deactivateAccount | deleteAfter |
56
+
| deleteAccount | did, password, token |
57
+
| deleteSession | (none) |
58
+
| getAccountInviteCodes | createAvailable, includeUsed |
59
+
| getServiceAuth | aud, exp, lxm |
60
+
| listAppPasswords | (none) |
61
+
| requestAccountDelete | (none) |
62
+
| requestEmailConfirmation | (none) |
63
+
| requestEmailUpdate | (none) |
64
+
| requestPasswordReset | email |
65
+
| reserveSigningKey | did |
66
+
| resetPassword | password, token |
67
+
| revokeAppPassword | name |
68
+
| updateEmail | email, emailAuthFactor, token |
69
+
70
+
*pds.js implements: createSession, getSession, refreshSession, describeServer*
71
+
72
+
### com.atproto.sync.* (some missing)
73
+
74
+
| Endpoint | Params |
75
+
|----------|--------|
76
+
| getBlocks | cids, did |
77
+
| getHostStatus | hostname |
78
+
| listHosts | cursor, limit |
79
+
| listReposByCollection | collection, cursor, limit |
80
+
| notifyOfUpdate | hostname |
81
+
| requestCrawl | hostname |
82
+
83
+
*pds.js implements: listRepos, getLatestCommit, getRepoStatus, getRepo, getRecord, getBlob, listBlobs, subscribeRepos*
84
+
85
+
### com.atproto.repo.* (some missing)
86
+
87
+
| Endpoint | Params |
88
+
|----------|--------|
89
+
| importRepo | (binary) |
90
+
| listMissingBlobs | cursor, limit |
91
+
92
+
*pds.js implements: createRecord, deleteRecord, putRecord, applyWrites, getRecord, describeRepo, listRecords, uploadBlob*
93
+
94
+
### com.atproto.moderation.*
95
+
96
+
| Endpoint | Params |
97
+
|----------|--------|
98
+
| createReport | modTool, reason, reasonType, subject |
99
+
100
+
### com.atproto.temp.* (entire namespace missing)
101
+
102
+
| Endpoint | Params |
103
+
|----------|--------|
104
+
| addReservedHandle | handle |
105
+
| checkHandleAvailability | birthDate, email, handle |
106
+
| checkSignupQueue | (none) |
107
+
| dereferenceScope | scope |
108
+
| fetchLabels | limit, since |
109
+
| requestPhoneVerification | phoneNumber |
110
+
| revokeAccountCredentials | account |
111
+
112
+
---
113
+
114
+
## Missing Parameters in Shared Endpoints
115
+
116
+
Endpoints that exist in both implementations, but pds.js is missing parameters:
117
+
118
+
| Endpoint | pds.js has | Missing from pds.js |
119
+
|----------|------------|---------------------|
120
+
| repo.createRecord | collection, record, rkey | **repo**, **validate**, swapCommit |
121
+
| repo.deleteRecord | collection, rkey | **repo**, swapCommit, swapRecord |
122
+
| repo.putRecord | collection, rkey, record | **repo**, **validate**, swapCommit, swapRecord |
123
+
| repo.applyWrites | writes | **repo**, validate, swapCommit |
124
+
| sync.getRepo | did | since |
125
+
| sync.listBlobs | did, cursor, limit | since |
126
+
| sync.listRepos | (none) | cursor, limit |
127
+
| server.createSession | identifier, password | allowTakendown, authFactorToken |
128
+
129
+
**Bold** = likely important for compatibility
130
+
131
+
---
132
+
133
+
## app.bsky.* Coverage
134
+
135
+
Both implementations handle app.bsky.* the same way:
136
+
137
+
| Category | Endpoints | Notes |
138
+
|----------|-----------|-------|
139
+
| Native (stored in PDS) | actor.getPreferences, actor.putPreferences | Both implementations |
140
+
| Proxied to AppView | ~87 endpoints | feed.*, graph.*, notification.*, etc. |
141
+
142
+
---
143
+
144
+
## Extra in pds.js (not in atproto spec)
145
+
146
+
Custom endpoints specific to this implementation:
147
+
148
+
| Endpoint | Purpose |
149
+
|----------|---------|
150
+
| `POST /init` | Initialize PDS with DID/keys |
151
+
| `GET /status` | Health check |
152
+
| `POST /register-did` | Register a DID |
153
+
| `GET /get-registered-dids` | List registered DIDs |
154
+
| `POST /register-handle` | Register a handle |
155
+
| `GET /resolve-handle` | Resolve handle (non-XRPC path) |
156
+
| `POST /forward-event` | Forward sync events to other instances |
157
+
| `GET /repo-info` | Get repository info |
158
+
| `GET /oauth-public-key` | Get OAuth public key |
159
+
| `POST /check-dpop-jti` | DPoP replay protection |
160
+
161
+
### OAuth 2.0 Stack (full implementation)
162
+
163
+
| Endpoint | Purpose |
164
+
|----------|---------|
165
+
| `GET /.well-known/oauth-authorization-server` | OAuth server metadata |
166
+
| `GET /.well-known/oauth-protected-resource` | Protected resource metadata |
167
+
| `GET /oauth/jwks` | JSON Web Key Set |
168
+
| `POST /oauth/par` | Pushed Authorization Request |
169
+
| `GET/POST /oauth/authorize` | Authorization endpoint |
170
+
| `POST /oauth/token` | Token endpoint (with DPoP) |
171
+
| `POST /oauth/revoke` | Token revocation |
172
+
173
+
---
174
+
175
+
## Summary
176
+
177
+
| Category | pds.js | atproto PDS |
178
+
|----------|--------|-------------|
179
+
| com.atproto.admin.* | 0 | 15 |
180
+
| com.atproto.identity.* | 1 | 9 |
181
+
| com.atproto.moderation.* | 0 | 1 |
182
+
| com.atproto.repo.* | 8 | 10 |
183
+
| com.atproto.server.* | 4 | 25 |
184
+
| com.atproto.sync.* | 8 | 14 |
185
+
| com.atproto.temp.* | 0 | 7 |
186
+
| app.bsky.* (native) | 2 | 2 |
187
+
| app.bsky.* (proxied) | ~87 | ~87 |
188
+
| **Total XRPC (native)** | **23** | **85** |
189
+
| Custom endpoints | 10 | 0 |
190
+
| OAuth endpoints | 7 | 7 (via @atproto/oauth-provider) |
+848
docs/plans/2026-01-05-auth-sessions.md
+848
docs/plans/2026-01-05-auth-sessions.md
···
1
+
# Authentication & Sessions Implementation Plan
2
+
3
+
> **For Claude:** REQUIRED SUB-SKILL: Use superpowers:executing-plans to implement this plan task-by-task.
4
+
5
+
**Goal:** Add authentication to the PDS so users can login from bsky.app and create records.
6
+
7
+
**Architecture:** JWT-based authentication using HMAC-SHA256. Password checked against `PDS_PASSWORD` env var. Access tokens expire in 2 hours, refresh tokens in 90 days. Write endpoints (`createRecord`, `deleteRecord`) require valid access token.
8
+
9
+
**Tech Stack:** Web Crypto API for HMAC signing, manual JWT encoding/decoding (no external deps)
10
+
11
+
---
12
+
13
+
## Task 1: Add JWT Helper Functions
14
+
15
+
**Files:**
16
+
- Modify: `src/pds.js:461-469` (after existing `base64UrlDecode`)
17
+
18
+
**Step 1: Write failing test for base64url encode/decode**
19
+
20
+
Add to `test/pds.test.js`:
21
+
22
+
```javascript
23
+
import {
24
+
cborEncode, cborDecode, createCid, cidToString, cidToBytes, base32Encode, createTid,
25
+
generateKeyPair, importPrivateKey, sign, bytesToHex, hexToBytes,
26
+
getKeyDepth, varint, base32Decode, buildCarFile,
27
+
base64UrlEncode, base64UrlDecode
28
+
} from '../src/pds.js'
29
+
30
+
// Add new test block after existing tests:
31
+
32
+
describe('JWT Base64URL', () => {
33
+
test('base64UrlEncode encodes bytes correctly', () => {
34
+
const input = new TextEncoder().encode('hello world')
35
+
const encoded = base64UrlEncode(input)
36
+
assert.strictEqual(encoded, 'aGVsbG8gd29ybGQ')
37
+
assert.ok(!encoded.includes('+'))
38
+
assert.ok(!encoded.includes('/'))
39
+
assert.ok(!encoded.includes('='))
40
+
})
41
+
42
+
test('base64UrlDecode decodes string correctly', () => {
43
+
const decoded = base64UrlDecode('aGVsbG8gd29ybGQ')
44
+
const str = new TextDecoder().decode(decoded)
45
+
assert.strictEqual(str, 'hello world')
46
+
})
47
+
48
+
test('base64url roundtrip', () => {
49
+
const original = new Uint8Array([0, 1, 2, 255, 254, 253])
50
+
const encoded = base64UrlEncode(original)
51
+
const decoded = base64UrlDecode(encoded)
52
+
assert.deepStrictEqual(decoded, original)
53
+
})
54
+
})
55
+
```
56
+
57
+
**Step 2: Run test to verify it fails**
58
+
59
+
Run: `npm test -- --test-name-pattern "JWT Base64URL"`
60
+
Expected: FAIL with "base64UrlEncode is not exported"
61
+
62
+
**Step 3: Implement base64url functions**
63
+
64
+
In `src/pds.js`, replace the existing `base64UrlDecode` function (around line 461) and add `base64UrlEncode`:
65
+
66
+
```javascript
67
+
/**
68
+
* Encode bytes as base64url string (no padding)
69
+
* @param {Uint8Array} bytes - Bytes to encode
70
+
* @returns {string} Base64url-encoded string
71
+
*/
72
+
export function base64UrlEncode(bytes) {
73
+
let binary = ''
74
+
for (const byte of bytes) {
75
+
binary += String.fromCharCode(byte)
76
+
}
77
+
const base64 = btoa(binary)
78
+
return base64.replace(/\+/g, '-').replace(/\//g, '_').replace(/=/g, '')
79
+
}
80
+
81
+
/**
82
+
* Decode base64url string to bytes
83
+
* @param {string} str - Base64url-encoded string
84
+
* @returns {Uint8Array} Decoded bytes
85
+
*/
86
+
export function base64UrlDecode(str) {
87
+
const base64 = str.replace(/-/g, '+').replace(/_/g, '/')
88
+
const pad = base64.length % 4
89
+
const padded = pad ? base64 + '='.repeat(4 - pad) : base64
90
+
const binary = atob(padded)
91
+
const bytes = new Uint8Array(binary.length)
92
+
for (let i = 0; i < binary.length; i++) {
93
+
bytes[i] = binary.charCodeAt(i)
94
+
}
95
+
return bytes
96
+
}
97
+
```
98
+
99
+
**Step 4: Run test to verify it passes**
100
+
101
+
Run: `npm test -- --test-name-pattern "JWT Base64URL"`
102
+
Expected: PASS
103
+
104
+
**Step 5: Commit**
105
+
106
+
```bash
107
+
git add src/pds.js test/pds.test.js
108
+
git commit -m "feat: add base64url encode/decode helpers for JWT"
109
+
```
110
+
111
+
---
112
+
113
+
## Task 2: Add JWT Creation Functions
114
+
115
+
**Files:**
116
+
- Modify: `src/pds.js` (add after base64url functions, around line 490)
117
+
118
+
**Step 1: Write failing test for JWT creation**
119
+
120
+
Add to `test/pds.test.js`:
121
+
122
+
```javascript
123
+
import {
124
+
// ... existing imports ...
125
+
base64UrlEncode, base64UrlDecode,
126
+
createAccessJwt, createRefreshJwt
127
+
} from '../src/pds.js'
128
+
129
+
describe('JWT Creation', () => {
130
+
test('createAccessJwt creates valid JWT structure', async () => {
131
+
const did = 'did:web:test.example'
132
+
const secret = 'test-secret-key'
133
+
const jwt = await createAccessJwt(did, secret)
134
+
135
+
const parts = jwt.split('.')
136
+
assert.strictEqual(parts.length, 3)
137
+
138
+
// Decode header
139
+
const header = JSON.parse(new TextDecoder().decode(base64UrlDecode(parts[0])))
140
+
assert.strictEqual(header.typ, 'at+jwt')
141
+
assert.strictEqual(header.alg, 'HS256')
142
+
143
+
// Decode payload
144
+
const payload = JSON.parse(new TextDecoder().decode(base64UrlDecode(parts[1])))
145
+
assert.strictEqual(payload.scope, 'com.atproto.access')
146
+
assert.strictEqual(payload.sub, did)
147
+
assert.strictEqual(payload.aud, did)
148
+
assert.ok(payload.iat > 0)
149
+
assert.ok(payload.exp > payload.iat)
150
+
})
151
+
152
+
test('createRefreshJwt creates valid JWT with jti', async () => {
153
+
const did = 'did:web:test.example'
154
+
const secret = 'test-secret-key'
155
+
const jwt = await createRefreshJwt(did, secret)
156
+
157
+
const parts = jwt.split('.')
158
+
const header = JSON.parse(new TextDecoder().decode(base64UrlDecode(parts[0])))
159
+
assert.strictEqual(header.typ, 'refresh+jwt')
160
+
161
+
const payload = JSON.parse(new TextDecoder().decode(base64UrlDecode(parts[1])))
162
+
assert.strictEqual(payload.scope, 'com.atproto.refresh')
163
+
assert.ok(payload.jti) // has unique token ID
164
+
})
165
+
})
166
+
```
167
+
168
+
**Step 2: Run test to verify it fails**
169
+
170
+
Run: `npm test -- --test-name-pattern "JWT Creation"`
171
+
Expected: FAIL with "createAccessJwt is not exported"
172
+
173
+
**Step 3: Implement JWT creation functions**
174
+
175
+
Add to `src/pds.js` after base64url functions:
176
+
177
+
```javascript
178
+
/**
179
+
* Create HMAC-SHA256 signature for JWT
180
+
* @param {string} data - Data to sign (header.payload)
181
+
* @param {string} secret - Secret key
182
+
* @returns {Promise<string>} Base64url-encoded signature
183
+
*/
184
+
async function hmacSign(data, secret) {
185
+
const key = await crypto.subtle.importKey(
186
+
'raw',
187
+
new TextEncoder().encode(secret),
188
+
{ name: 'HMAC', hash: 'SHA-256' },
189
+
false,
190
+
['sign']
191
+
)
192
+
const sig = await crypto.subtle.sign('HMAC', key, new TextEncoder().encode(data))
193
+
return base64UrlEncode(new Uint8Array(sig))
194
+
}
195
+
196
+
/**
197
+
* Create an access JWT for ATProto
198
+
* @param {string} did - User's DID (subject and audience)
199
+
* @param {string} secret - JWT signing secret
200
+
* @param {number} [expiresIn=7200] - Expiration in seconds (default 2 hours)
201
+
* @returns {Promise<string>} Signed JWT
202
+
*/
203
+
export async function createAccessJwt(did, secret, expiresIn = 7200) {
204
+
const header = { typ: 'at+jwt', alg: 'HS256' }
205
+
const now = Math.floor(Date.now() / 1000)
206
+
const payload = {
207
+
scope: 'com.atproto.access',
208
+
sub: did,
209
+
aud: did,
210
+
iat: now,
211
+
exp: now + expiresIn
212
+
}
213
+
214
+
const headerB64 = base64UrlEncode(new TextEncoder().encode(JSON.stringify(header)))
215
+
const payloadB64 = base64UrlEncode(new TextEncoder().encode(JSON.stringify(payload)))
216
+
const signature = await hmacSign(`${headerB64}.${payloadB64}`, secret)
217
+
218
+
return `${headerB64}.${payloadB64}.${signature}`
219
+
}
220
+
221
+
/**
222
+
* Create a refresh JWT for ATProto
223
+
* @param {string} did - User's DID (subject and audience)
224
+
* @param {string} secret - JWT signing secret
225
+
* @param {number} [expiresIn=7776000] - Expiration in seconds (default 90 days)
226
+
* @returns {Promise<string>} Signed JWT
227
+
*/
228
+
export async function createRefreshJwt(did, secret, expiresIn = 7776000) {
229
+
const header = { typ: 'refresh+jwt', alg: 'HS256' }
230
+
const now = Math.floor(Date.now() / 1000)
231
+
// Generate random jti (token ID)
232
+
const jtiBytes = new Uint8Array(32)
233
+
crypto.getRandomValues(jtiBytes)
234
+
const jti = base64UrlEncode(jtiBytes)
235
+
236
+
const payload = {
237
+
scope: 'com.atproto.refresh',
238
+
sub: did,
239
+
aud: did,
240
+
jti,
241
+
iat: now,
242
+
exp: now + expiresIn
243
+
}
244
+
245
+
const headerB64 = base64UrlEncode(new TextEncoder().encode(JSON.stringify(header)))
246
+
const payloadB64 = base64UrlEncode(new TextEncoder().encode(JSON.stringify(payload)))
247
+
const signature = await hmacSign(`${headerB64}.${payloadB64}`, secret)
248
+
249
+
return `${headerB64}.${payloadB64}.${signature}`
250
+
}
251
+
```
252
+
253
+
**Step 4: Run test to verify it passes**
254
+
255
+
Run: `npm test -- --test-name-pattern "JWT Creation"`
256
+
Expected: PASS
257
+
258
+
**Step 5: Commit**
259
+
260
+
```bash
261
+
git add src/pds.js test/pds.test.js
262
+
git commit -m "feat: add JWT creation functions for access and refresh tokens"
263
+
```
264
+
265
+
---
266
+
267
+
## Task 3: Add JWT Verification Function
268
+
269
+
**Files:**
270
+
- Modify: `src/pds.js` (add after JWT creation functions)
271
+
272
+
**Step 1: Write failing test for JWT verification**
273
+
274
+
Add to `test/pds.test.js`:
275
+
276
+
```javascript
277
+
import {
278
+
// ... existing imports ...
279
+
createAccessJwt, createRefreshJwt,
280
+
verifyAccessJwt
281
+
} from '../src/pds.js'
282
+
283
+
describe('JWT Verification', () => {
284
+
test('verifyAccessJwt returns payload for valid token', async () => {
285
+
const did = 'did:web:test.example'
286
+
const secret = 'test-secret-key'
287
+
const jwt = await createAccessJwt(did, secret)
288
+
289
+
const payload = await verifyAccessJwt(jwt, secret)
290
+
assert.strictEqual(payload.sub, did)
291
+
assert.strictEqual(payload.scope, 'com.atproto.access')
292
+
})
293
+
294
+
test('verifyAccessJwt throws for wrong secret', async () => {
295
+
const did = 'did:web:test.example'
296
+
const jwt = await createAccessJwt(did, 'correct-secret')
297
+
298
+
await assert.rejects(
299
+
() => verifyAccessJwt(jwt, 'wrong-secret'),
300
+
/invalid signature/i
301
+
)
302
+
})
303
+
304
+
test('verifyAccessJwt throws for expired token', async () => {
305
+
const did = 'did:web:test.example'
306
+
const secret = 'test-secret-key'
307
+
// Create token that expired 1 second ago
308
+
const jwt = await createAccessJwt(did, secret, -1)
309
+
310
+
await assert.rejects(
311
+
() => verifyAccessJwt(jwt, secret),
312
+
/expired/i
313
+
)
314
+
})
315
+
316
+
test('verifyAccessJwt throws for refresh token', async () => {
317
+
const did = 'did:web:test.example'
318
+
const secret = 'test-secret-key'
319
+
const jwt = await createRefreshJwt(did, secret)
320
+
321
+
await assert.rejects(
322
+
() => verifyAccessJwt(jwt, secret),
323
+
/invalid token type/i
324
+
)
325
+
})
326
+
})
327
+
```
328
+
329
+
**Step 2: Run test to verify it fails**
330
+
331
+
Run: `npm test -- --test-name-pattern "JWT Verification"`
332
+
Expected: FAIL with "verifyAccessJwt is not exported"
333
+
334
+
**Step 3: Implement JWT verification**
335
+
336
+
Add to `src/pds.js` after JWT creation functions:
337
+
338
+
```javascript
339
+
/**
340
+
* Verify and decode an access JWT
341
+
* @param {string} jwt - JWT string to verify
342
+
* @param {string} secret - JWT signing secret
343
+
* @returns {Promise<Object>} Decoded payload
344
+
* @throws {Error} If token is invalid, expired, or wrong type
345
+
*/
346
+
export async function verifyAccessJwt(jwt, secret) {
347
+
const parts = jwt.split('.')
348
+
if (parts.length !== 3) {
349
+
throw new Error('Invalid JWT format')
350
+
}
351
+
352
+
const [headerB64, payloadB64, signatureB64] = parts
353
+
354
+
// Verify signature
355
+
const expectedSig = await hmacSign(`${headerB64}.${payloadB64}`, secret)
356
+
if (signatureB64 !== expectedSig) {
357
+
throw new Error('Invalid signature')
358
+
}
359
+
360
+
// Decode header and payload
361
+
const header = JSON.parse(new TextDecoder().decode(base64UrlDecode(headerB64)))
362
+
const payload = JSON.parse(new TextDecoder().decode(base64UrlDecode(payloadB64)))
363
+
364
+
// Check token type
365
+
if (header.typ !== 'at+jwt') {
366
+
throw new Error('Invalid token type: expected access token')
367
+
}
368
+
369
+
// Check expiration
370
+
const now = Math.floor(Date.now() / 1000)
371
+
if (payload.exp && payload.exp < now) {
372
+
throw new Error('Token expired')
373
+
}
374
+
375
+
return payload
376
+
}
377
+
```
378
+
379
+
**Step 4: Run test to verify it passes**
380
+
381
+
Run: `npm test -- --test-name-pattern "JWT Verification"`
382
+
Expected: PASS
383
+
384
+
**Step 5: Commit**
385
+
386
+
```bash
387
+
git add src/pds.js test/pds.test.js
388
+
git commit -m "feat: add JWT verification function"
389
+
```
390
+
391
+
---
392
+
393
+
## Task 4: Add createSession Endpoint
394
+
395
+
**Files:**
396
+
- Modify: `src/pds.js:869-940` (add to pdsRoutes)
397
+
- Modify: `src/pds.js` (add handler method to PersonalDataServer class)
398
+
399
+
**Step 1: Add route to pdsRoutes**
400
+
401
+
In `src/pds.js`, add to the `pdsRoutes` object (around line 902, after describeServer):
402
+
403
+
```javascript
404
+
'/xrpc/com.atproto.server.createSession': {
405
+
method: 'POST',
406
+
handler: (pds, req, url) => pds.handleCreateSession(req)
407
+
},
408
+
```
409
+
410
+
**Step 2: Add handler method**
411
+
412
+
Add to `PersonalDataServer` class (after `handleDescribeServer`, around line 1427):
413
+
414
+
```javascript
415
+
async handleCreateSession(request) {
416
+
const body = await request.json()
417
+
const { identifier, password } = body
418
+
419
+
if (!identifier || !password) {
420
+
return Response.json({
421
+
error: 'InvalidRequest',
422
+
message: 'Missing identifier or password'
423
+
}, { status: 400 })
424
+
}
425
+
426
+
// Check password against env var
427
+
const expectedPassword = this.env?.PDS_PASSWORD
428
+
if (!expectedPassword || password !== expectedPassword) {
429
+
return Response.json({
430
+
error: 'AuthenticationRequired',
431
+
message: 'Invalid identifier or password'
432
+
}, { status: 401 })
433
+
}
434
+
435
+
// Resolve identifier to DID
436
+
let did = identifier
437
+
if (!identifier.startsWith('did:')) {
438
+
// Try to resolve handle
439
+
const handleMap = await this.state.storage.get('handleMap') || {}
440
+
did = handleMap[identifier]
441
+
if (!did) {
442
+
return Response.json({
443
+
error: 'InvalidRequest',
444
+
message: 'Unable to resolve handle'
445
+
}, { status: 400 })
446
+
}
447
+
}
448
+
449
+
// Get handle for response
450
+
const handle = await this.getHandleForDid(did)
451
+
452
+
// Create tokens
453
+
const jwtSecret = this.env?.JWT_SECRET
454
+
if (!jwtSecret) {
455
+
return Response.json({
456
+
error: 'InternalServerError',
457
+
message: 'Server not configured for authentication'
458
+
}, { status: 500 })
459
+
}
460
+
461
+
const accessJwt = await createAccessJwt(did, jwtSecret)
462
+
const refreshJwt = await createRefreshJwt(did, jwtSecret)
463
+
464
+
return Response.json({
465
+
accessJwt,
466
+
refreshJwt,
467
+
handle: handle || did,
468
+
did,
469
+
active: true
470
+
})
471
+
}
472
+
473
+
async getHandleForDid(did) {
474
+
// Check if this DID has a handle registered
475
+
const handleMap = await this.state.storage.get('handleMap') || {}
476
+
for (const [handle, mappedDid] of Object.entries(handleMap)) {
477
+
if (mappedDid === did) return handle
478
+
}
479
+
// Check instance's own handle
480
+
const instanceDid = await this.getDid()
481
+
if (instanceDid === did) {
482
+
return await this.state.storage.get('handle')
483
+
}
484
+
return null
485
+
}
486
+
```
487
+
488
+
**Step 3: Add route in main handleRequest**
489
+
490
+
In `src/pds.js`, in the `handleRequest` function (around line 1796), add handling for createSession right after describeServer:
491
+
492
+
```javascript
493
+
// createSession - handle on default DO (has handleMap for identifier resolution)
494
+
if (url.pathname === '/xrpc/com.atproto.server.createSession') {
495
+
const defaultId = env.PDS.idFromName('default')
496
+
const defaultPds = env.PDS.get(defaultId)
497
+
return defaultPds.fetch(request)
498
+
}
499
+
```
500
+
501
+
**Step 4: Test manually**
502
+
503
+
Deploy and test:
504
+
```bash
505
+
npx wrangler deploy
506
+
curl -X POST 'https://chad-pds.chad-53c.workers.dev/xrpc/com.atproto.server.createSession' \
507
+
-H 'Content-Type: application/json' \
508
+
-d '{"identifier":"chad-pds.chad-53c.workers.dev","password":"YOUR_PASSWORD"}'
509
+
```
510
+
511
+
Expected: JSON response with `accessJwt`, `refreshJwt`, `handle`, `did`, `active`
512
+
513
+
**Step 5: Commit**
514
+
515
+
```bash
516
+
git add src/pds.js
517
+
git commit -m "feat: add com.atproto.server.createSession endpoint"
518
+
```
519
+
520
+
---
521
+
522
+
## Task 5: Add getSession Endpoint
523
+
524
+
**Files:**
525
+
- Modify: `src/pds.js` (add route and handler)
526
+
527
+
**Step 1: Add route to pdsRoutes**
528
+
529
+
In `src/pds.js`, add to the `pdsRoutes` object (after createSession):
530
+
531
+
```javascript
532
+
'/xrpc/com.atproto.server.getSession': {
533
+
handler: (pds, req, url) => pds.handleGetSession(req)
534
+
},
535
+
```
536
+
537
+
**Step 2: Add handler method**
538
+
539
+
Add to `PersonalDataServer` class (after `handleCreateSession`):
540
+
541
+
```javascript
542
+
async handleGetSession(request) {
543
+
const authHeader = request.headers.get('Authorization')
544
+
if (!authHeader || !authHeader.startsWith('Bearer ')) {
545
+
return Response.json({
546
+
error: 'AuthenticationRequired',
547
+
message: 'Missing or invalid authorization header'
548
+
}, { status: 401 })
549
+
}
550
+
551
+
const token = authHeader.slice(7) // Remove 'Bearer '
552
+
const jwtSecret = this.env?.JWT_SECRET
553
+
if (!jwtSecret) {
554
+
return Response.json({
555
+
error: 'InternalServerError',
556
+
message: 'Server not configured for authentication'
557
+
}, { status: 500 })
558
+
}
559
+
560
+
try {
561
+
const payload = await verifyAccessJwt(token, jwtSecret)
562
+
const did = payload.sub
563
+
const handle = await this.getHandleForDid(did)
564
+
565
+
return Response.json({
566
+
handle: handle || did,
567
+
did,
568
+
active: true
569
+
})
570
+
} catch (err) {
571
+
return Response.json({
572
+
error: 'InvalidToken',
573
+
message: err.message
574
+
}, { status: 401 })
575
+
}
576
+
}
577
+
```
578
+
579
+
**Step 3: Add route in main handleRequest**
580
+
581
+
In `src/pds.js`, in the `handleRequest` function, add handling for getSession (after createSession):
582
+
583
+
```javascript
584
+
// getSession - route to default DO
585
+
if (url.pathname === '/xrpc/com.atproto.server.getSession') {
586
+
const defaultId = env.PDS.idFromName('default')
587
+
const defaultPds = env.PDS.get(defaultId)
588
+
return defaultPds.fetch(request)
589
+
}
590
+
```
591
+
592
+
**Step 4: Test manually**
593
+
594
+
```bash
595
+
# First get a token
596
+
TOKEN=$(curl -s -X POST 'https://chad-pds.chad-53c.workers.dev/xrpc/com.atproto.server.createSession' \
597
+
-H 'Content-Type: application/json' \
598
+
-d '{"identifier":"chad-pds.chad-53c.workers.dev","password":"YOUR_PASSWORD"}' | jq -r '.accessJwt')
599
+
600
+
# Then test getSession
601
+
curl 'https://chad-pds.chad-53c.workers.dev/xrpc/com.atproto.server.getSession' \
602
+
-H "Authorization: Bearer $TOKEN"
603
+
```
604
+
605
+
Expected: JSON response with `handle`, `did`, `active`
606
+
607
+
**Step 5: Commit**
608
+
609
+
```bash
610
+
git add src/pds.js
611
+
git commit -m "feat: add com.atproto.server.getSession endpoint"
612
+
```
613
+
614
+
---
615
+
616
+
## Task 6: Add Auth Middleware and Protect Write Endpoints
617
+
618
+
**Files:**
619
+
- Modify: `src/pds.js` (add requireAuth helper, modify createRecord/deleteRecord handlers)
620
+
621
+
**Step 1: Add requireAuth helper function**
622
+
623
+
Add to `src/pds.js` (before the `handleRequest` function, around line 1774):
624
+
625
+
```javascript
626
+
/**
627
+
* Verify auth and return DID from token
628
+
* @param {Request} request - HTTP request with Authorization header
629
+
* @param {Object} env - Environment with JWT_SECRET
630
+
* @returns {Promise<{did: string} | {error: Response}>} DID or error response
631
+
*/
632
+
async function requireAuth(request, env) {
633
+
const authHeader = request.headers.get('Authorization')
634
+
if (!authHeader || !authHeader.startsWith('Bearer ')) {
635
+
return {
636
+
error: Response.json({
637
+
error: 'AuthenticationRequired',
638
+
message: 'Authentication required'
639
+
}, { status: 401 })
640
+
}
641
+
}
642
+
643
+
const token = authHeader.slice(7)
644
+
const jwtSecret = env?.JWT_SECRET
645
+
if (!jwtSecret) {
646
+
return {
647
+
error: Response.json({
648
+
error: 'InternalServerError',
649
+
message: 'Server not configured for authentication'
650
+
}, { status: 500 })
651
+
}
652
+
}
653
+
654
+
try {
655
+
const payload = await verifyAccessJwt(token, jwtSecret)
656
+
return { did: payload.sub }
657
+
} catch (err) {
658
+
return {
659
+
error: Response.json({
660
+
error: 'InvalidToken',
661
+
message: err.message
662
+
}, { status: 401 })
663
+
}
664
+
}
665
+
}
666
+
```
667
+
668
+
**Step 2: Modify createRecord in handleRequest**
669
+
670
+
In `src/pds.js`, find the createRecord handling in `handleRequest` (around line 1854) and update it:
671
+
672
+
```javascript
673
+
// POST repo endpoints have repo in body - REQUIRE AUTH
674
+
if (url.pathname === '/xrpc/com.atproto.repo.createRecord') {
675
+
// Check auth first
676
+
const auth = await requireAuth(request, env)
677
+
if (auth.error) return auth.error
678
+
679
+
// Read the JSON body (this consumes the request body; it is re-serialized below when forwarding)
680
+
const body = await request.json()
681
+
const repo = body.repo
682
+
if (!repo) {
683
+
return Response.json({ error: 'InvalidRequest', message: 'missing repo param' }, { status: 400 })
684
+
}
685
+
686
+
// Verify authenticated user matches repo
687
+
if (auth.did !== repo) {
688
+
return Response.json({
689
+
error: 'Forbidden',
690
+
message: 'Cannot write to another user\'s repo'
691
+
}, { status: 403 })
692
+
}
693
+
694
+
const id = env.PDS.idFromName(repo)
695
+
const pds = env.PDS.get(id)
696
+
return pds.fetch(new Request(request.url, {
697
+
method: 'POST',
698
+
headers: request.headers,
699
+
body: JSON.stringify(body)
700
+
}))
701
+
}
702
+
```
703
+
704
+
**Step 3: Modify deleteRecord in handleRequest**
705
+
706
+
Update the deleteRecord handling similarly:
707
+
708
+
```javascript
709
+
if (url.pathname === '/xrpc/com.atproto.repo.deleteRecord') {
710
+
// Check auth first
711
+
const auth = await requireAuth(request, env)
712
+
if (auth.error) return auth.error
713
+
714
+
const body = await request.json()
715
+
const repo = body.repo
716
+
if (!repo) {
717
+
return Response.json({ error: 'InvalidRequest', message: 'missing repo param' }, { status: 400 })
718
+
}
719
+
720
+
// Verify authenticated user matches repo
721
+
if (auth.did !== repo) {
722
+
return Response.json({
723
+
error: 'Forbidden',
724
+
message: 'Cannot modify another user\'s repo'
725
+
}, { status: 403 })
726
+
}
727
+
728
+
const id = env.PDS.idFromName(repo)
729
+
const pds = env.PDS.get(id)
730
+
return pds.fetch(new Request(request.url, {
731
+
method: 'POST',
732
+
headers: request.headers,
733
+
body: JSON.stringify(body)
734
+
}))
735
+
}
736
+
```
737
+
738
+
**Step 4: Test auth protection**
739
+
740
+
```bash
741
+
# Without auth - should fail
742
+
curl -X POST 'https://chad-pds.chad-53c.workers.dev/xrpc/com.atproto.repo.createRecord' \
743
+
-H 'Content-Type: application/json' \
744
+
-d '{"repo":"did:web:chad-pds.chad-53c.workers.dev","collection":"app.bsky.feed.post","record":{"text":"test","createdAt":"2024-01-01T00:00:00Z"}}'
745
+
# Expected: 401 AuthenticationRequired
746
+
747
+
# With auth - should work
748
+
TOKEN=$(curl -s -X POST 'https://chad-pds.chad-53c.workers.dev/xrpc/com.atproto.server.createSession' \
749
+
-H 'Content-Type: application/json' \
750
+
-d '{"identifier":"chad-pds.chad-53c.workers.dev","password":"YOUR_PASSWORD"}' | jq -r '.accessJwt')
751
+
752
+
curl -X POST 'https://chad-pds.chad-53c.workers.dev/xrpc/com.atproto.repo.createRecord' \
753
+
-H 'Content-Type: application/json' \
754
+
-H "Authorization: Bearer $TOKEN" \
755
+
-d '{"repo":"did:web:chad-pds.chad-53c.workers.dev","collection":"app.bsky.feed.post","record":{"text":"test","createdAt":"2024-01-01T00:00:00Z"}}'
756
+
# Expected: 200 with uri, cid, commit
757
+
```
758
+
759
+
**Step 5: Commit**
760
+
761
+
```bash
762
+
git add src/pds.js
763
+
git commit -m "feat: protect createRecord and deleteRecord with JWT auth"
764
+
```
765
+
766
+
---
767
+
768
+
## Task 7: Configure Environment Variables
769
+
770
+
**Files:**
771
+
- Modify: `wrangler.toml` (optional - can use wrangler secret instead)
772
+
773
+
**Step 1: Set secrets using wrangler**
774
+
775
+
```bash
776
+
# Set the password for login
777
+
npx wrangler secret put PDS_PASSWORD
778
+
# Enter your password when prompted
779
+
780
+
# Set the JWT signing secret (generate a random string)
781
+
npx wrangler secret put JWT_SECRET
782
+
# Enter a long random string (e.g., openssl rand -base64 32)
783
+
```
784
+
785
+
**Step 2: Deploy and verify**
786
+
787
+
```bash
788
+
npx wrangler deploy
789
+
```
790
+
791
+
**Step 3: Test full flow**
792
+
793
+
```bash
794
+
# Login
795
+
curl -X POST 'https://chad-pds.chad-53c.workers.dev/xrpc/com.atproto.server.createSession' \
796
+
-H 'Content-Type: application/json' \
797
+
-d '{"identifier":"chad-pds.chad-53c.workers.dev","password":"YOUR_PASSWORD"}'
798
+
```
799
+
800
+
---
801
+
802
+
## Task 8: Test with Bluesky App
803
+
804
+
**Step 1: Open bsky.app**
805
+
806
+
Go to https://bsky.app and click "Sign in"
807
+
808
+
**Step 2: Enter custom PDS**
809
+
810
+
Click "Hosting provider" and enter your PDS URL: `chad-pds.chad-53c.workers.dev`
811
+
812
+
**Step 3: Login**
813
+
814
+
Enter your handle (e.g., `chad-pds.chad-53c.workers.dev`) and password.
815
+
816
+
**Step 4: Verify login works**
817
+
818
+
You should see your profile. Try creating a post to verify write access works.
819
+
820
+
**Step 5: Final commit**
821
+
822
+
```bash
823
+
git add -A
824
+
git commit -m "feat: complete authentication implementation for Bluesky app login"
825
+
```
826
+
827
+
---
828
+
829
+
## Summary of Changes
830
+
831
+
1. **New exports in `src/pds.js`:**
832
+
- `base64UrlEncode(bytes)` - Encode bytes to base64url
833
+
- `base64UrlDecode(str)` - Decode base64url to bytes
834
+
- `createAccessJwt(did, secret)` - Create access token
835
+
- `createRefreshJwt(did, secret)` - Create refresh token
836
+
- `verifyAccessJwt(jwt, secret)` - Verify access token
837
+
838
+
2. **New endpoints:**
839
+
- `POST /xrpc/com.atproto.server.createSession` - Login
840
+
- `GET /xrpc/com.atproto.server.getSession` - Verify session
841
+
842
+
3. **Modified endpoints:**
843
+
- `POST /xrpc/com.atproto.repo.createRecord` - Now requires auth
844
+
- `POST /xrpc/com.atproto.repo.deleteRecord` - Now requires auth
845
+
846
+
4. **Environment variables:**
847
+
- `PDS_PASSWORD` - Password for login
848
+
- `JWT_SECRET` - Secret for signing JWTs
+888
docs/plans/2026-01-06-blob-support.md
+888
docs/plans/2026-01-06-blob-support.md
···
1
+
# Blob Support Implementation Plan
2
+
3
+
> **For Claude:** REQUIRED SUB-SKILL: Use superpowers:executing-plans to implement this plan task-by-task.
4
+
5
+
**Goal:** Add blob (image/video) upload, storage, and retrieval to the PDS using Cloudflare R2.
6
+
7
+
**Architecture:** Blobs stored in R2 bucket keyed by `{did}/{cid}`. Metadata tracked in SQLite tables (`blob`, `record_blob`) within each Durable Object. Orphan cleanup via DO alarm. MIME sniffing for security.
8
+
9
+
**Tech Stack:** Cloudflare R2, Durable Object SQLite, Web Crypto API (SHA-256 for CID generation)
10
+
11
+
---
12
+
13
+
## Task 1: Add R2 Bucket Binding
14
+
15
+
**Files:**
16
+
- Modify: `wrangler.toml`
17
+
18
+
**Step 1: Add R2 binding to wrangler.toml**
19
+
20
+
Add after the existing migrations section:
21
+
22
+
```toml
23
+
[[r2_buckets]]
24
+
binding = "BLOBS"
25
+
bucket_name = "pds-blobs"
26
+
```
27
+
28
+
**Step 2: Create R2 bucket (if not exists)**
29
+
30
+
Run: `npx wrangler r2 bucket create pds-blobs`
31
+
32
+
**Step 3: Commit**
33
+
34
+
```bash
35
+
git add wrangler.toml
36
+
git commit -m "feat: add R2 bucket binding for blob storage"
37
+
```
38
+
39
+
---
40
+
41
+
## Task 2: Add Blob Database Schema
42
+
43
+
**Files:**
44
+
- Modify: `src/pds.js:1162-1190` (constructor schema initialization)
45
+
46
+
**Step 1: Add blob and record_blob tables**
47
+
48
+
In the `PersonalDataServer` constructor, after the existing `CREATE TABLE` statements (around line 1186), add:
49
+
50
+
```sql
51
+
CREATE TABLE IF NOT EXISTS blob (
52
+
cid TEXT PRIMARY KEY,
53
+
mimeType TEXT NOT NULL,
54
+
size INTEGER NOT NULL,
55
+
createdAt TEXT NOT NULL
56
+
);
57
+
58
+
CREATE TABLE IF NOT EXISTS record_blob (
59
+
blobCid TEXT NOT NULL,
60
+
recordUri TEXT NOT NULL,
61
+
PRIMARY KEY (blobCid, recordUri)
62
+
);
63
+
```
64
+
65
+
**Step 2: Test schema creation manually**
66
+
67
+
Deploy and verify tables exist:
68
+
```bash
69
+
npx wrangler deploy
70
+
```
71
+
72
+
**Step 3: Commit**
73
+
74
+
```bash
75
+
git add src/pds.js
76
+
git commit -m "feat: add blob and record_blob tables to schema"
77
+
```
78
+
79
+
---
80
+
81
+
## Task 3: Implement MIME Type Sniffing
82
+
83
+
**Files:**
84
+
- Modify: `src/pds.js` (add after error helper, around line 30)
85
+
- Test: `test/pds.test.js`
86
+
87
+
**Step 1: Write the failing test**
88
+
89
+
Add to `test/pds.test.js`:
90
+
91
+
```javascript
92
+
import {
93
+
// ... existing imports ...
94
+
sniffMimeType,
95
+
} from '../src/pds.js';
96
+
97
+
describe('MIME Type Sniffing', () => {
98
+
test('detects JPEG', () => {
99
+
const bytes = new Uint8Array([0xFF, 0xD8, 0xFF, 0xE0, 0x00, 0x10]);
100
+
assert.strictEqual(sniffMimeType(bytes), 'image/jpeg');
101
+
});
102
+
103
+
test('detects PNG', () => {
104
+
const bytes = new Uint8Array([0x89, 0x50, 0x4E, 0x47, 0x0D, 0x0A, 0x1A, 0x0A]);
105
+
assert.strictEqual(sniffMimeType(bytes), 'image/png');
106
+
});
107
+
108
+
test('detects GIF', () => {
109
+
const bytes = new Uint8Array([0x47, 0x49, 0x46, 0x38, 0x39, 0x61]);
110
+
assert.strictEqual(sniffMimeType(bytes), 'image/gif');
111
+
});
112
+
113
+
test('detects WebP', () => {
114
+
const bytes = new Uint8Array([
115
+
0x52, 0x49, 0x46, 0x46, // RIFF
116
+
0x00, 0x00, 0x00, 0x00, // size (ignored)
117
+
0x57, 0x45, 0x42, 0x50, // WEBP
118
+
]);
119
+
assert.strictEqual(sniffMimeType(bytes), 'image/webp');
120
+
});
121
+
122
+
test('detects MP4', () => {
123
+
const bytes = new Uint8Array([
124
+
0x00, 0x00, 0x00, 0x18, // size
125
+
0x66, 0x74, 0x79, 0x70, // ftyp
126
+
]);
127
+
assert.strictEqual(sniffMimeType(bytes), 'video/mp4');
128
+
});
129
+
130
+
test('returns null for unknown', () => {
131
+
const bytes = new Uint8Array([0x00, 0x01, 0x02, 0x03]);
132
+
assert.strictEqual(sniffMimeType(bytes), null);
133
+
});
134
+
});
135
+
```
136
+
137
+
**Step 2: Run test to verify it fails**
138
+
139
+
Run: `npm test`
140
+
Expected: FAIL with "sniffMimeType is not exported"
141
+
142
+
**Step 3: Write minimal implementation**
143
+
144
+
Add to `src/pds.js` after the error helper (around line 30):
145
+
146
+
```javascript
147
+
// === MIME TYPE SNIFFING ===
148
+
// Detect file type from magic bytes (first 12 bytes)
149
+
150
+
/**
151
+
* Sniff MIME type from file magic bytes
152
+
* @param {Uint8Array|ArrayBuffer} bytes - File bytes (only first 12 needed)
153
+
* @returns {string|null} Detected MIME type or null if unknown
154
+
*/
155
+
export function sniffMimeType(bytes) {
156
+
const arr = new Uint8Array(bytes.slice(0, 12));
157
+
158
+
// JPEG: FF D8 FF
159
+
if (arr[0] === 0xff && arr[1] === 0xd8 && arr[2] === 0xff) {
160
+
return 'image/jpeg';
161
+
}
162
+
163
+
// PNG: 89 50 4E 47 0D 0A 1A 0A
164
+
if (
165
+
arr[0] === 0x89 &&
166
+
arr[1] === 0x50 &&
167
+
arr[2] === 0x4e &&
168
+
arr[3] === 0x47 &&
169
+
arr[4] === 0x0d &&
170
+
arr[5] === 0x0a &&
171
+
arr[6] === 0x1a &&
172
+
arr[7] === 0x0a
173
+
) {
174
+
return 'image/png';
175
+
}
176
+
177
+
// GIF: 47 49 46 38 (GIF8)
178
+
if (
179
+
arr[0] === 0x47 &&
180
+
arr[1] === 0x49 &&
181
+
arr[2] === 0x46 &&
182
+
arr[3] === 0x38
183
+
) {
184
+
return 'image/gif';
185
+
}
186
+
187
+
// WebP: RIFF....WEBP
188
+
if (
189
+
arr[0] === 0x52 &&
190
+
arr[1] === 0x49 &&
191
+
arr[2] === 0x46 &&
192
+
arr[3] === 0x46 &&
193
+
arr[8] === 0x57 &&
194
+
arr[9] === 0x45 &&
195
+
arr[10] === 0x42 &&
196
+
arr[11] === 0x50
197
+
) {
198
+
return 'image/webp';
199
+
}
200
+
201
+
// MP4/MOV: ....ftyp at byte 4
202
+
if (
203
+
arr[4] === 0x66 &&
204
+
arr[5] === 0x74 &&
205
+
arr[6] === 0x79 &&
206
+
arr[7] === 0x70
207
+
) {
208
+
return 'video/mp4';
209
+
}
210
+
211
+
return null;
212
+
}
213
+
```
214
+
215
+
**Step 4: Run test to verify it passes**
216
+
217
+
Run: `npm test`
218
+
Expected: PASS
219
+
220
+
**Step 5: Commit**
221
+
222
+
```bash
223
+
git add src/pds.js test/pds.test.js
224
+
git commit -m "feat: add MIME type sniffing from magic bytes"
225
+
```
226
+
227
+
---
228
+
229
+
## Task 4: Implement Blob Ref Detection
230
+
231
+
**Files:**
232
+
- Modify: `src/pds.js` (add after sniffMimeType)
233
+
- Test: `test/pds.test.js`
234
+
235
+
**Step 1: Write the failing test**
236
+
237
+
Add to `test/pds.test.js`:
238
+
239
+
```javascript
240
+
import {
241
+
// ... existing imports ...
242
+
findBlobRefs,
243
+
} from '../src/pds.js';
244
+
245
+
describe('Blob Ref Detection', () => {
246
+
test('finds blob ref in simple object', () => {
247
+
const record = {
248
+
$type: 'app.bsky.feed.post',
249
+
text: 'Hello',
250
+
embed: {
251
+
$type: 'app.bsky.embed.images',
252
+
images: [
253
+
{
254
+
image: {
255
+
$type: 'blob',
256
+
ref: { $link: 'bafkreiabc123' },
257
+
mimeType: 'image/jpeg',
258
+
size: 1234,
259
+
},
260
+
alt: 'test image',
261
+
},
262
+
],
263
+
},
264
+
};
265
+
const refs = findBlobRefs(record);
266
+
assert.deepStrictEqual(refs, ['bafkreiabc123']);
267
+
});
268
+
269
+
test('finds multiple blob refs', () => {
270
+
const record = {
271
+
images: [
272
+
{ image: { $type: 'blob', ref: { $link: 'cid1' }, mimeType: 'image/png', size: 100 } },
273
+
{ image: { $type: 'blob', ref: { $link: 'cid2' }, mimeType: 'image/png', size: 200 } },
274
+
],
275
+
};
276
+
const refs = findBlobRefs(record);
277
+
assert.deepStrictEqual(refs, ['cid1', 'cid2']);
278
+
});
279
+
280
+
test('returns empty array when no blobs', () => {
281
+
const record = { text: 'Hello world', count: 42 };
282
+
const refs = findBlobRefs(record);
283
+
assert.deepStrictEqual(refs, []);
284
+
});
285
+
286
+
test('handles null and primitives', () => {
287
+
assert.deepStrictEqual(findBlobRefs(null), []);
288
+
assert.deepStrictEqual(findBlobRefs('string'), []);
289
+
assert.deepStrictEqual(findBlobRefs(42), []);
290
+
});
291
+
});
292
+
```
293
+
294
+
**Step 2: Run test to verify it fails**
295
+
296
+
Run: `npm test`
297
+
Expected: FAIL with "findBlobRefs is not exported"
298
+
299
+
**Step 3: Write minimal implementation**
300
+
301
+
Add to `src/pds.js` after sniffMimeType:
302
+
303
+
```javascript
304
+
// === BLOB REF DETECTION ===
305
+
// Recursively find blob references in records
306
+
307
+
/**
308
+
* Find all blob CID references in a record
309
+
* @param {*} obj - Record value to scan
310
+
* @param {string[]} refs - Accumulator array (internal)
311
+
* @returns {string[]} Array of blob CID strings
312
+
*/
313
+
export function findBlobRefs(obj, refs = []) {
314
+
if (!obj || typeof obj !== 'object') {
315
+
return refs;
316
+
}
317
+
318
+
// Check if this object is a blob ref
319
+
if (obj.$type === 'blob' && obj.ref?.$link) {
320
+
refs.push(obj.ref.$link);
321
+
}
322
+
323
+
// Recurse into arrays and objects
324
+
if (Array.isArray(obj)) {
325
+
for (const item of obj) {
326
+
findBlobRefs(item, refs);
327
+
}
328
+
} else {
329
+
for (const value of Object.values(obj)) {
330
+
findBlobRefs(value, refs);
331
+
}
332
+
}
333
+
334
+
return refs;
335
+
}
336
+
```
337
+
338
+
**Step 4: Run test to verify it passes**
339
+
340
+
Run: `npm test`
341
+
Expected: PASS
342
+
343
+
**Step 5: Commit**
344
+
345
+
```bash
346
+
git add src/pds.js test/pds.test.js
347
+
git commit -m "feat: add blob ref detection for records"
348
+
```
349
+
350
+
---
351
+
352
+
## Task 5: Implement uploadBlob Endpoint
353
+
354
+
**Files:**
355
+
- Modify: `src/pds.js` (add route and handler)
356
+
357
+
**Step 1: Add route to pdsRoutes**
358
+
359
+
In `pdsRoutes` object (around line 1055), add:
360
+
361
+
```javascript
362
+
'/xrpc/com.atproto.repo.uploadBlob': {
363
+
method: 'POST',
364
+
handler: (pds, req, _url) => pds.handleUploadBlob(req),
365
+
},
366
+
```
367
+
368
+
**Step 2: Add handler method to PersonalDataServer class**
369
+
370
+
Add method to the class (after existing handlers):
371
+
372
+
```javascript
373
+
async handleUploadBlob(request) {
374
+
// Require auth
375
+
const authResult = await this.requireAuth(request);
376
+
if (authResult instanceof Response) return authResult;
377
+
378
+
const did = await this.getDid();
379
+
if (!did) {
380
+
return errorResponse('InvalidRequest', 'PDS not initialized', 400);
381
+
}
382
+
383
+
// Read body as ArrayBuffer
384
+
const bodyBytes = await request.arrayBuffer();
385
+
const size = bodyBytes.byteLength;
386
+
387
+
// Check size limit (50MB)
388
+
const MAX_BLOB_SIZE = 50 * 1024 * 1024;
389
+
if (size > MAX_BLOB_SIZE) {
390
+
return errorResponse(
391
+
'BlobTooLarge',
392
+
`Blob size ${size} exceeds maximum ${MAX_BLOB_SIZE}`,
393
+
400,
394
+
);
395
+
}
396
+
397
+
// Sniff MIME type, fall back to Content-Type header
398
+
const contentType = request.headers.get('Content-Type') || 'application/octet-stream';
399
+
const sniffed = sniffMimeType(bodyBytes);
400
+
const mimeType = sniffed || contentType;
401
+
402
+
// Compute CID (reuse existing createCid)
403
+
const cid = await createCid(new Uint8Array(bodyBytes));
404
+
const cidStr = cidToString(cid);
405
+
406
+
// Check if blob already exists
407
+
const existing = this.sql
408
+
.exec('SELECT cid FROM blob WHERE cid = ?', cidStr)
409
+
.toArray();
410
+
411
+
if (existing.length === 0) {
412
+
// Upload to R2
413
+
const r2Key = `${did}/${cidStr}`;
414
+
await this.env.BLOBS.put(r2Key, bodyBytes, {
415
+
httpMetadata: { contentType: mimeType },
416
+
});
417
+
418
+
// Insert metadata
419
+
const createdAt = new Date().toISOString();
420
+
this.sql.exec(
421
+
'INSERT INTO blob (cid, mimeType, size, createdAt) VALUES (?, ?, ?, ?)',
422
+
cidStr,
423
+
mimeType,
424
+
size,
425
+
createdAt,
426
+
);
427
+
}
428
+
429
+
// Return BlobRef
430
+
return Response.json({
431
+
blob: {
432
+
$type: 'blob',
433
+
ref: { $link: cidStr },
434
+
mimeType,
435
+
size,
436
+
},
437
+
});
438
+
}
439
+
```
440
+
441
+
**Step 3: Verify deployment**
442
+
443
+
Run: `npx wrangler deploy`
444
+
445
+
**Step 4: Test manually with curl**
446
+
447
+
```bash
448
+
curl -X POST \
449
+
-H "Authorization: Bearer <access-token>" \
450
+
-H "Content-Type: image/png" \
451
+
--data-binary @test-image.png \
452
+
https://your-pds.workers.dev/xrpc/com.atproto.repo.uploadBlob
453
+
```
454
+
455
+
Expected: JSON response with blob ref
456
+
457
+
**Step 5: Commit**
458
+
459
+
```bash
460
+
git add src/pds.js
461
+
git commit -m "feat: implement uploadBlob endpoint with R2 storage"
462
+
```
463
+
464
+
---
465
+
466
+
## Task 6: Implement getBlob Endpoint
467
+
468
+
**Files:**
469
+
- Modify: `src/pds.js` (add route and handler)
470
+
471
+
**Step 1: Add route to pdsRoutes**
472
+
473
+
```javascript
474
+
'/xrpc/com.atproto.sync.getBlob': {
475
+
handler: (pds, _req, url) => pds.handleGetBlob(url),
476
+
},
477
+
```
478
+
479
+
**Step 2: Add handler method**
480
+
481
+
```javascript
482
+
async handleGetBlob(url) {
483
+
const did = url.searchParams.get('did');
484
+
const cid = url.searchParams.get('cid');
485
+
486
+
if (!did || !cid) {
487
+
return errorResponse('InvalidRequest', 'missing did or cid parameter', 400);
488
+
}
489
+
490
+
// Verify DID matches this DO
491
+
const myDid = await this.getDid();
492
+
if (did !== myDid) {
493
+
return errorResponse('InvalidRequest', 'DID does not match this repo', 400);
494
+
}
495
+
496
+
// Look up blob metadata
497
+
const rows = this.sql
498
+
.exec('SELECT mimeType, size FROM blob WHERE cid = ?', cid)
499
+
.toArray();
500
+
501
+
if (rows.length === 0) {
502
+
return errorResponse('BlobNotFound', 'blob not found', 404);
503
+
}
504
+
505
+
const { mimeType, size } = rows[0];
506
+
507
+
// Fetch from R2
508
+
const r2Key = `${did}/${cid}`;
509
+
const object = await this.env.BLOBS.get(r2Key);
510
+
511
+
if (!object) {
512
+
return errorResponse('BlobNotFound', 'blob not found in storage', 404);
513
+
}
514
+
515
+
// Return blob with security headers
516
+
return new Response(object.body, {
517
+
headers: {
518
+
'Content-Type': mimeType,
519
+
'Content-Length': String(size),
520
+
'X-Content-Type-Options': 'nosniff',
521
+
'Content-Security-Policy': "default-src 'none'; sandbox",
522
+
'Cache-Control': 'public, max-age=31536000, immutable',
523
+
},
524
+
});
525
+
}
526
+
```
527
+
528
+
**Step 3: Deploy and test**
529
+
530
+
Run: `npx wrangler deploy`
531
+
532
+
Test:
533
+
```bash
534
+
curl "https://your-pds.workers.dev/xrpc/com.atproto.sync.getBlob?did=did:plc:xxx&cid=bafkrei..."
535
+
```
536
+
537
+
**Step 4: Commit**
538
+
539
+
```bash
540
+
git add src/pds.js
541
+
git commit -m "feat: implement getBlob endpoint"
542
+
```
543
+
544
+
---
545
+
546
+
## Task 7: Implement listBlobs Endpoint
547
+
548
+
**Files:**
549
+
- Modify: `src/pds.js` (add route and handler)
550
+
551
+
**Step 1: Add route to pdsRoutes**
552
+
553
+
```javascript
554
+
'/xrpc/com.atproto.sync.listBlobs': {
555
+
handler: (pds, _req, url) => pds.handleListBlobs(url),
556
+
},
557
+
```
558
+
559
+
**Step 2: Add handler method**
560
+
561
+
```javascript
562
+
async handleListBlobs(url) {
563
+
const did = url.searchParams.get('did');
564
+
const cursor = url.searchParams.get('cursor');
565
+
const limit = Math.min(Number(url.searchParams.get('limit')) || 500, 1000);
566
+
567
+
if (!did) {
568
+
return errorResponse('InvalidRequest', 'missing did parameter', 400);
569
+
}
570
+
571
+
// Verify DID matches this DO
572
+
const myDid = await this.getDid();
573
+
if (did !== myDid) {
574
+
return errorResponse('InvalidRequest', 'DID does not match this repo', 400);
575
+
}
576
+
577
+
// Query blobs with pagination
578
+
let query = 'SELECT cid, createdAt FROM blob';
579
+
const params = [];
580
+
581
+
if (cursor) {
582
+
query += ' WHERE createdAt > ?';
583
+
params.push(cursor);
584
+
}
585
+
586
+
query += ' ORDER BY createdAt ASC LIMIT ?';
587
+
params.push(limit + 1); // Fetch one extra to detect if there's more
588
+
589
+
const rows = this.sql.exec(query, ...params).toArray();
590
+
591
+
// Determine if there's a next page
592
+
let nextCursor = null;
593
+
if (rows.length > limit) {
594
+
rows.pop(); // Remove the extra row
595
+
nextCursor = rows[rows.length - 1].createdAt;
596
+
}
597
+
598
+
return Response.json({
599
+
cids: rows.map((r) => r.cid),
600
+
cursor: nextCursor,
601
+
});
602
+
}
603
+
```
604
+
605
+
**Step 3: Deploy and test**
606
+
607
+
Run: `npx wrangler deploy`
608
+
609
+
Test:
610
+
```bash
611
+
curl "https://your-pds.workers.dev/xrpc/com.atproto.sync.listBlobs?did=did:plc:xxx"
612
+
```
613
+
614
+
**Step 4: Commit**
615
+
616
+
```bash
617
+
git add src/pds.js
618
+
git commit -m "feat: implement listBlobs endpoint"
619
+
```
620
+
621
+
---
622
+
623
+
## Task 8: Integrate Blob Association with createRecord
624
+
625
+
**Files:**
626
+
- Modify: `src/pds.js:1253` (createRecord method)
627
+
628
+
**Step 1: Add blob association after record storage**
629
+
630
+
In `createRecord` method, after storing the record in the `records` table (around line 1280), add:
631
+
632
+
```javascript
633
+
// Associate blobs with this record
634
+
const blobRefs = findBlobRefs(record);
635
+
for (const blobCid of blobRefs) {
636
+
// Verify blob exists
637
+
const blobExists = this.sql
638
+
.exec('SELECT cid FROM blob WHERE cid = ?', blobCid)
639
+
.toArray();
640
+
641
+
if (blobExists.length === 0) {
642
+
throw new Error(`BlobNotFound: ${blobCid}`);
643
+
}
644
+
645
+
// Create association
646
+
this.sql.exec(
647
+
'INSERT OR IGNORE INTO record_blob (blobCid, recordUri) VALUES (?, ?)',
648
+
blobCid,
649
+
uri,
650
+
);
651
+
}
652
+
```
653
+
654
+
**Step 2: Deploy and test**
655
+
656
+
Test by uploading a blob, then creating a post that references it:
657
+
658
+
```bash
659
+
# Upload blob
660
+
BLOB=$(curl -X POST -H "Authorization: Bearer $TOKEN" \
661
+
-H "Content-Type: image/png" --data-binary @test.png \
662
+
https://your-pds.workers.dev/xrpc/com.atproto.repo.uploadBlob)
663
+
664
+
echo $BLOB # Get the CID
665
+
666
+
# Create post with image
667
+
curl -X POST -H "Authorization: Bearer $TOKEN" \
668
+
-H "Content-Type: application/json" \
669
+
https://your-pds.workers.dev/xrpc/com.atproto.repo.createRecord \
670
+
-d '{
671
+
"repo": "did:plc:xxx",
672
+
"collection": "app.bsky.feed.post",
673
+
"record": {
674
+
"$type": "app.bsky.feed.post",
675
+
"text": "Hello with image!",
676
+
"createdAt": "2026-01-06T12:00:00.000Z",
677
+
"embed": {
678
+
"$type": "app.bsky.embed.images",
679
+
"images": [{
680
+
"image": {
681
+
"$type": "blob",
682
+
"ref": {"$link": "<cid-from-upload>"},
683
+
"mimeType": "image/png",
684
+
"size": 1234
685
+
},
686
+
"alt": "test"
687
+
}]
688
+
}
689
+
}
690
+
}'
691
+
```
692
+
693
+
**Step 3: Commit**
694
+
695
+
```bash
696
+
git add src/pds.js
697
+
git commit -m "feat: associate blobs with records on createRecord"
698
+
```
699
+
700
+
---
701
+
702
+
## Task 9: Implement Blob Cleanup on deleteRecord
703
+
704
+
**Files:**
705
+
- Modify: `src/pds.js:1391` (deleteRecord method)
706
+
707
+
**Step 1: Add blob cleanup after record deletion**
708
+
709
+
In `deleteRecord` method, after deleting the record from the `records` table, add:
710
+
711
+
```javascript
712
+
// Get blobs associated with this record
713
+
const associatedBlobs = this.sql
714
+
.exec('SELECT blobCid FROM record_blob WHERE recordUri = ?', uri)
715
+
.toArray();
716
+
717
+
// Remove associations for this record
718
+
this.sql.exec('DELETE FROM record_blob WHERE recordUri = ?', uri);
719
+
720
+
// Check each blob for orphan status and delete if unreferenced
721
+
for (const { blobCid } of associatedBlobs) {
722
+
const stillReferenced = this.sql
723
+
.exec('SELECT 1 FROM record_blob WHERE blobCid = ? LIMIT 1', blobCid)
724
+
.toArray();
725
+
726
+
if (stillReferenced.length === 0) {
727
+
// Blob is orphaned, delete from R2 and database
728
+
const did = await this.getDid();
729
+
await this.env.BLOBS.delete(`${did}/${blobCid}`);
730
+
this.sql.exec('DELETE FROM blob WHERE cid = ?', blobCid);
731
+
}
732
+
}
733
+
```
734
+
735
+
**Step 2: Deploy and test**
736
+
737
+
Test by creating a post with an image, then deleting it:
738
+
739
+
```bash
740
+
# Delete the post
741
+
curl -X POST -H "Authorization: Bearer $TOKEN" \
742
+
-H "Content-Type: application/json" \
743
+
https://your-pds.workers.dev/xrpc/com.atproto.repo.deleteRecord \
744
+
-d '{
745
+
"repo": "did:plc:xxx",
746
+
"collection": "app.bsky.feed.post",
747
+
"rkey": "<rkey>"
748
+
}'
749
+
750
+
# Verify blob is gone
751
+
curl "https://your-pds.workers.dev/xrpc/com.atproto.sync.listBlobs?did=did:plc:xxx"
752
+
```
753
+
754
+
**Step 3: Commit**
755
+
756
+
```bash
757
+
git add src/pds.js
758
+
git commit -m "feat: cleanup orphaned blobs on record deletion"
759
+
```
760
+
761
+
---
762
+
763
+
## Task 10: Implement Orphan Cleanup Alarm
764
+
765
+
**Files:**
766
+
- Modify: `src/pds.js` (add alarm handler and scheduling)
767
+
768
+
**Step 1: Add alarm scheduling in initIdentity**
769
+
770
+
In the `initIdentity` method (or after successful init), add:
771
+
772
+
```javascript
773
+
// Schedule blob cleanup alarm (runs daily)
774
+
const currentAlarm = await this.state.storage.getAlarm();
775
+
if (!currentAlarm) {
776
+
await this.state.storage.setAlarm(Date.now() + 24 * 60 * 60 * 1000);
777
+
}
778
+
```
779
+
780
+
**Step 2: Add alarm handler to PersonalDataServer class**
781
+
782
+
```javascript
783
+
async alarm() {
784
+
await this.cleanupOrphanedBlobs();
785
+
// Reschedule for next day
786
+
await this.state.storage.setAlarm(Date.now() + 24 * 60 * 60 * 1000);
787
+
}
788
+
789
+
async cleanupOrphanedBlobs() {
790
+
const did = await this.getDid();
791
+
if (!did) return;
792
+
793
+
// Find orphans: blobs not in record_blob, older than 24h
794
+
const cutoff = new Date(Date.now() - 24 * 60 * 60 * 1000).toISOString();
795
+
796
+
const orphans = this.sql
797
+
.exec(
798
+
`SELECT b.cid FROM blob b
799
+
LEFT JOIN record_blob rb ON b.cid = rb.blobCid
800
+
WHERE rb.blobCid IS NULL AND b.createdAt < ?`,
801
+
cutoff,
802
+
)
803
+
.toArray();
804
+
805
+
for (const { cid } of orphans) {
806
+
await this.env.BLOBS.delete(`${did}/${cid}`);
807
+
this.sql.exec('DELETE FROM blob WHERE cid = ?', cid);
808
+
}
809
+
810
+
if (orphans.length > 0) {
811
+
console.log(`Cleaned up ${orphans.length} orphaned blobs`);
812
+
}
813
+
}
814
+
```
815
+
816
+
**Step 3: Deploy**
817
+
818
+
Run: `npx wrangler deploy`
819
+
820
+
**Step 4: Commit**
821
+
822
+
```bash
823
+
git add src/pds.js
824
+
git commit -m "feat: add DO alarm for orphaned blob cleanup"
825
+
```
826
+
827
+
---
828
+
829
+
## Task 11: Update README
830
+
831
+
**Files:**
832
+
- Modify: `README.md`
833
+
834
+
**Step 1: Update feature checklist**
835
+
836
+
Change:
837
+
```markdown
838
+
- [ ] Blob storage (uploadBlob, getBlob, listBlobs)
839
+
```
840
+
841
+
To:
842
+
```markdown
843
+
- [x] Blob storage (uploadBlob, getBlob, listBlobs)
844
+
```
845
+
846
+
**Step 2: Add blob configuration section**
847
+
848
+
Add under configuration:
849
+
850
+
```markdown
851
+
### Blob Storage
852
+
853
+
Blobs (images, videos) are stored in Cloudflare R2:
854
+
855
+
1. Create an R2 bucket: `npx wrangler r2 bucket create pds-blobs`
856
+
2. The binding is already configured in `wrangler.toml`
857
+
858
+
Supported formats: JPEG, PNG, GIF, WebP, MP4
859
+
Max size: 50MB
860
+
Orphaned blobs are automatically cleaned up after 24 hours.
861
+
```
862
+
863
+
**Step 3: Commit**
864
+
865
+
```bash
866
+
git add README.md
867
+
git commit -m "docs: update README with blob storage feature"
868
+
```
869
+
870
+
---
871
+
872
+
## Summary
873
+
874
+
| Task | Description | Files Modified |
875
+
|------|-------------|----------------|
876
+
| 1 | Add R2 bucket binding | `wrangler.toml` |
877
+
| 2 | Add blob database schema | `src/pds.js` |
878
+
| 3 | Implement MIME sniffing | `src/pds.js`, `test/pds.test.js` |
879
+
| 4 | Implement blob ref detection | `src/pds.js`, `test/pds.test.js` |
880
+
| 5 | Implement uploadBlob endpoint | `src/pds.js` |
881
+
| 6 | Implement getBlob endpoint | `src/pds.js` |
882
+
| 7 | Implement listBlobs endpoint | `src/pds.js` |
883
+
| 8 | Integrate blob association | `src/pds.js` |
884
+
| 9 | Cleanup blobs on delete | `src/pds.js` |
885
+
| 10 | Add orphan cleanup alarm | `src/pds.js` |
886
+
| 11 | Update README | `README.md` |
887
+
888
+
**Estimated additions:** ~250 lines to `src/pds.js`, ~60 lines to `test/pds.test.js`
+496
docs/plans/2026-01-06-pds-file-reorganization.md
+496
docs/plans/2026-01-06-pds-file-reorganization.md
···
1
+
# PDS File Reorganization Implementation Plan
2
+
3
+
> **For Claude:** REQUIRED SUB-SKILL: Use superpowers:executing-plans to implement this plan task-by-task.
4
+
5
+
**Goal:** Reorganize pds.js into logical domain sections with box-style headers for improved readability.
6
+
7
+
**Architecture:** Reorder existing code into 12 logical domains without changing functionality. Add Unicode box-style section headers. Group related utilities that are currently scattered.
8
+
9
+
**Tech Stack:** JavaScript, JSDoc
10
+
11
+
---
12
+
13
+
## Box Header Format
14
+
15
+
All section headers use this format (80 chars wide):
16
+
```javascript
17
+
// ┌──────────────────────────────────────────────────────────────────────────────┐
18
+
// │ SECTION NAME                                                                 │
19
+
// │ Brief description of what this section contains                              │
20
+
// └──────────────────────────────────────────────────────────────────────────────┘
21
+
```
22
+
23
+
---
24
+
25
+
### Task 1: Types & Constants Section
26
+
27
+
**Files:**
28
+
- Modify: `src/pds.js` (lines 17-84, plus scattered constants)
29
+
30
+
**Step 1: Create the new section header and gather all types/constants**
31
+
32
+
Move these items to the top (after the file header comment):
33
+
- `CBOR_FALSE`, `CBOR_TRUE`, `CBOR_NULL`, `CBOR_TAG_CID` (from lines 19-24)
34
+
- `CODEC_DAG_CBOR`, `CODEC_RAW` (from lines 480-481)
35
+
- `TID_CHARS`, `clockId`, `lastTimestamp` (from lines 563-566)
36
+
- `P256_N`, `P256_N_DIV_2` (from lines 638-641)
37
+
- All typedefs: `Env`, `BlockRow`, `RecordRow`, `CommitRow`, `SeqEventRow`, `BlobRow`, `JwtPayload`
38
+
39
+
Add header:
40
+
```javascript
41
+
// ┌──────────────────────────────────────────────────────────────────────────────┐
42
+
// │ TYPES & CONSTANTS                                                            │
43
+
// │ Environment bindings, SQL row types, protocol constants                      │
44
+
// └──────────────────────────────────────────────────────────────────────────────┘
45
+
```
46
+
47
+
**Step 2: Run typecheck to verify no breakage**
48
+
49
+
Run: `npm run typecheck`
50
+
Expected: 0 errors
51
+
52
+
**Step 3: Commit**
53
+
54
+
```bash
55
+
git add src/pds.js
56
+
git commit -m "refactor: consolidate types and constants section"
57
+
```
58
+
59
+
---
60
+
61
+
### Task 2: Utilities Section
62
+
63
+
**Files:**
64
+
- Modify: `src/pds.js`
65
+
66
+
**Step 1: Create utilities section after types/constants**
67
+
68
+
Move these functions together:
69
+
- `errorResponse()` (from line 92)
70
+
- `bytesToHex()` (from line 990)
71
+
- `hexToBytes()` (from line 1001)
72
+
- `bytesToBigInt()` (from line 647)
73
+
- `bigIntToBytes()` (from line 660)
74
+
- `base32Encode()` (from line 538)
75
+
- `base32Decode()` (from line 1237)
76
+
- `base64UrlEncode()` (from line 745)
77
+
- `base64UrlDecode()` (from line 759)
78
+
- `varint()` (from line 1211)
79
+
80
+
Add header:
81
+
```javascript
82
+
// ┌──────────────────────────────────────────────────────────────────────────────┐
83
+
// │ UTILITIES                                                                    │
84
+
// │ Error responses, byte conversion, base encoding                              │
85
+
// └──────────────────────────────────────────────────────────────────────────────┘
86
+
```
87
+
88
+
**Step 2: Run typecheck**
89
+
90
+
Run: `npm run typecheck`
91
+
Expected: 0 errors
92
+
93
+
**Step 3: Commit**
94
+
95
+
```bash
96
+
git add src/pds.js
97
+
git commit -m "refactor: consolidate utilities section"
98
+
```
99
+
100
+
---
101
+
102
+
### Task 3: CBOR Encoding Section
103
+
104
+
**Files:**
105
+
- Modify: `src/pds.js`
106
+
107
+
**Step 1: Create CBOR section**
108
+
109
+
Keep together (already grouped, just add new header):
110
+
- `encodeHead()`
111
+
- `cborEncode()`
112
+
- `cborEncodeDagCbor()`
113
+
- `cborDecode()`
114
+
115
+
Replace `// === CBOR ENCODING ===` with:
116
+
```javascript
117
+
// ┌──────────────────────────────────────────────────────────────────────────────┐
118
+
// │ CBOR ENCODING                                                                │
119
+
// │ RFC 8949 CBOR and DAG-CBOR for content-addressed data                        │
120
+
// └──────────────────────────────────────────────────────────────────────────────┘
121
+
```
122
+
123
+
**Step 2: Run typecheck**
124
+
125
+
Run: `npm run typecheck`
126
+
Expected: 0 errors
127
+
128
+
**Step 3: Commit**
129
+
130
+
```bash
131
+
git add src/pds.js
132
+
git commit -m "refactor: add CBOR encoding section header"
133
+
```
134
+
135
+
---
136
+
137
+
### Task 4: Content Identifiers Section
138
+
139
+
**Files:**
140
+
- Modify: `src/pds.js`
141
+
142
+
**Step 1: Create CID/TID section**
143
+
144
+
Group together:
145
+
- `class CID` (from line 238)
146
+
- `createCidWithCodec()` (from line 489)
147
+
- `createCid()` (from line 510)
148
+
- `createBlobCid()` (from line 519)
149
+
- `cidToString()` (from line 528)
150
+
- `cidToBytes()` (from line 1226)
151
+
- `createTid()` (from line 572)
152
+
153
+
Add header:
154
+
```javascript
155
+
// ┌──────────────────────────────────────────────────────────────────────────────┐
156
+
// │ CONTENT IDENTIFIERS                                                          │
157
+
// │ CIDs (content hashes) and TIDs (timestamp IDs)                               │
158
+
// └──────────────────────────────────────────────────────────────────────────────┘
159
+
```
160
+
161
+
**Step 2: Run typecheck**
162
+
163
+
Run: `npm run typecheck`
164
+
Expected: 0 errors
165
+
166
+
**Step 3: Commit**
167
+
168
+
```bash
169
+
git add src/pds.js
170
+
git commit -m "refactor: consolidate content identifiers section"
171
+
```
172
+
173
+
---
174
+
175
+
### Task 5: Cryptography Section
176
+
177
+
**Files:**
178
+
- Modify: `src/pds.js`
179
+
180
+
**Step 1: Create cryptography section**
181
+
182
+
Group together:
183
+
- `sha256()` (from line 1016)
184
+
- `importPrivateKey()` (from line 606)
185
+
- `generateKeyPair()` (from line 705)
186
+
- `compressPublicKey()` (from line 728)
187
+
- `sign()` (from line 675)
188
+
189
+
Add header:
190
+
```javascript
191
+
// ┌──────────────────────────────────────────────────────────────────────────────┐
192
+
// │ CRYPTOGRAPHY                                                                 │
193
+
// │ P-256 signing with low-S normalization, key management                       │
194
+
// └──────────────────────────────────────────────────────────────────────────────┘
195
+
```
196
+
197
+
**Step 2: Run typecheck**
198
+
199
+
Run: `npm run typecheck`
200
+
Expected: 0 errors
201
+
202
+
**Step 3: Commit**
203
+
204
+
```bash
205
+
git add src/pds.js
206
+
git commit -m "refactor: create cryptography section"
207
+
```
208
+
209
+
---
210
+
211
+
### Task 6: Authentication Section
212
+
213
+
**Files:**
214
+
- Modify: `src/pds.js`
215
+
216
+
**Step 1: Create authentication section**
217
+
218
+
Group together:
219
+
- `hmacSign()` (from line 777)
220
+
- `createAccessJwt()` (from line 800)
221
+
- `createRefreshJwt()` (from line 829)
222
+
- `verifyJwt()` (from line 876)
223
+
- `verifyAccessJwt()` (from line 919)
224
+
- `verifyRefreshJwt()` (from line 931)
225
+
- `createServiceJwt()` (from line 952)
226
+
227
+
Add header:
228
+
```javascript
229
+
// ┌──────────────────────────────────────────────────────────────────────────────┐
230
+
// │ AUTHENTICATION                                                               │
231
+
// │ JWT creation/verification for sessions and service auth                      │
232
+
// └──────────────────────────────────────────────────────────────────────────────┘
233
+
```
234
+
235
+
**Step 2: Run typecheck**
236
+
237
+
Run: `npm run typecheck`
238
+
Expected: 0 errors
239
+
240
+
**Step 3: Commit**
241
+
242
+
```bash
243
+
git add src/pds.js
244
+
git commit -m "refactor: create authentication section"
245
+
```
246
+
247
+
---
248
+
249
+
### Task 7: Merkle Search Tree Section
250
+
251
+
**Files:**
252
+
- Modify: `src/pds.js`
253
+
254
+
**Step 1: Update MST section header**
255
+
256
+
Keep together (already grouped):
257
+
- `keyDepthCache`
258
+
- `getKeyDepth()`
259
+
- `commonPrefixLen()`
260
+
- `class MST`
261
+
262
+
Replace `// === MERKLE SEARCH TREE ===` with:
263
+
```javascript
264
+
// ┌──────────────────────────────────────────────────────────────────────────────┐
265
+
// │ MERKLE SEARCH TREE                                                           │
266
+
// │ MST for ATProto repository structure                                         │
267
+
// └──────────────────────────────────────────────────────────────────────────────┘
268
+
```
269
+
270
+
**Step 2: Run typecheck**
271
+
272
+
Run: `npm run typecheck`
273
+
Expected: 0 errors
274
+
275
+
**Step 3: Commit**
276
+
277
+
```bash
278
+
git add src/pds.js
279
+
git commit -m "refactor: update MST section header"
280
+
```
281
+
282
+
---
283
+
284
+
### Task 8: CAR Files Section
285
+
286
+
**Files:**
287
+
- Modify: `src/pds.js`
288
+
289
+
**Step 1: Update CAR section**
290
+
291
+
Keep only:
292
+
- `buildCarFile()`
293
+
294
+
(Note: `varint()`, `cidToBytes()`, `base32Decode()` moved to earlier sections)
295
+
296
+
Replace `// === CAR FILE BUILDER ===` with:
297
+
```javascript
298
+
// ┌──────────────────────────────────────────────────────────────────────────────┐
299
+
// │ CAR FILES                                                                    │
300
+
// │ Content Addressable aRchive format for repo sync                             │
301
+
// └──────────────────────────────────────────────────────────────────────────────┘
302
+
```
303
+
304
+
**Step 2: Run typecheck**
305
+
306
+
Run: `npm run typecheck`
307
+
Expected: 0 errors
308
+
309
+
**Step 3: Commit**
310
+
311
+
```bash
312
+
git add src/pds.js
313
+
git commit -m "refactor: update CAR files section"
314
+
```
315
+
316
+
---
317
+
318
+
### Task 9: Blob Handling Section
319
+
320
+
**Files:**
321
+
- Modify: `src/pds.js`
322
+
323
+
**Step 1: Create blob handling section**
324
+
325
+
Group together:
326
+
- `sniffMimeType()` (from line 105)
327
+
- `findBlobRefs()` (from line 181)
328
+
- `CRAWL_NOTIFY_THRESHOLD`, `lastCrawlNotify` (from lines 207-208)
329
+
- `notifyCrawlers()` (from line 214)
330
+
331
+
Add header:
332
+
```javascript
333
+
// ┌──────────────────────────────────────────────────────────────────────────────┐
334
+
// │ BLOB HANDLING                                                                │
335
+
// │ MIME detection, blob reference scanning, crawler notification                │
336
+
// └──────────────────────────────────────────────────────────────────────────────┘
337
+
```
338
+
339
+
**Step 2: Run typecheck**
340
+
341
+
Run: `npm run typecheck`
342
+
Expected: 0 errors
343
+
344
+
**Step 3: Commit**
345
+
346
+
```bash
347
+
git add src/pds.js
348
+
git commit -m "refactor: create blob handling section"
349
+
```
350
+
351
+
---
352
+
353
+
### Task 10: Routing Section
354
+
355
+
**Files:**
356
+
- Modify: `src/pds.js`
357
+
358
+
**Step 1: Add routing section header**
359
+
360
+
Before `RouteHandler` callback typedef, add:
361
+
```javascript
362
+
// ┌──────────────────────────────────────────────────────────────────────────────┐
363
+
// │ ROUTING                                                                      │
364
+
// │ XRPC endpoint definitions                                                    │
365
+
// └──────────────────────────────────────────────────────────────────────────────┘
366
+
```
367
+
368
+
This section contains:
369
+
- `RouteHandler` (callback typedef)
370
+
- `Route` (typedef)
371
+
- `pdsRoutes`
372
+
373
+
**Step 2: Run typecheck**
374
+
375
+
Run: `npm run typecheck`
376
+
Expected: 0 errors
377
+
378
+
**Step 3: Commit**
379
+
380
+
```bash
381
+
git add src/pds.js
382
+
git commit -m "refactor: add routing section header"
383
+
```
384
+
385
+
---
386
+
387
+
### Task 11: Personal Data Server Section
388
+
389
+
**Files:**
390
+
- Modify: `src/pds.js`
391
+
392
+
**Step 1: Add PDS class section header**
393
+
394
+
Before `class PersonalDataServer`, add:
395
+
```javascript
396
+
// ┌──────────────────────────────────────────────────────────────────────────────┐
397
+
// │ PERSONAL DATA SERVER                                                         │
398
+
// │ Durable Object class implementing ATProto PDS                                │
399
+
// └──────────────────────────────────────────────────────────────────────────────┘
400
+
```
401
+
402
+
**Step 2: Run typecheck**
403
+
404
+
Run: `npm run typecheck`
405
+
Expected: 0 errors
406
+
407
+
**Step 3: Commit**
408
+
409
+
```bash
410
+
git add src/pds.js
411
+
git commit -m "refactor: add PDS class section header"
412
+
```
413
+
414
+
---
415
+
416
+
### Task 12: Workers Entry Point Section
417
+
418
+
**Files:**
419
+
- Modify: `src/pds.js`
420
+
421
+
**Step 1: Add workers entry point section header**
422
+
423
+
Before `corsHeaders`, add:
424
+
```javascript
425
+
// ┌──────────────────────────────────────────────────────────────────────────────┐
426
+
// │ WORKERS ENTRY POINT                                                          │
427
+
// │ Request handling, CORS, auth middleware                                      │
428
+
// └──────────────────────────────────────────────────────────────────────────────┘
429
+
```
430
+
431
+
This section contains:
432
+
- `corsHeaders`
433
+
- `addCorsHeaders()`
434
+
- `getSubdomain()`
435
+
- `requireAuth()`
436
+
- `handleAuthenticatedBlobUpload()`
437
+
- `handleAuthenticatedRepoWrite()`
438
+
- `handleRequest()`
439
+
- `default export`
440
+
441
+
**Step 2: Run typecheck**
442
+
443
+
Run: `npm run typecheck`
444
+
Expected: 0 errors
445
+
446
+
**Step 3: Commit**
447
+
448
+
```bash
449
+
git add src/pds.js
450
+
git commit -m "refactor: add workers entry point section header"
451
+
```
452
+
453
+
---
454
+
455
+
### Task 13: Final Verification
456
+
457
+
**Step 1: Run full typecheck**
458
+
459
+
Run: `npm run typecheck`
460
+
Expected: 0 errors
461
+
462
+
**Step 2: Run tests**
463
+
464
+
Run: `npm test`
465
+
Expected: All tests pass
466
+
467
+
**Step 3: Run e2e tests if available**
468
+
469
+
Run: `npm run test:e2e`
470
+
Expected: All tests pass
471
+
472
+
**Step 4: Final commit if any cleanup needed**
473
+
474
+
```bash
475
+
git add src/pds.js
476
+
git commit -m "refactor: complete pds.js reorganization with box headers"
477
+
```
478
+
479
+
---
480
+
481
+
## Section Order Summary
482
+
483
+
Final file structure (top to bottom):
484
+
1. File header comment
485
+
2. TYPES & CONSTANTS
486
+
3. UTILITIES
487
+
4. CBOR ENCODING
488
+
5. CONTENT IDENTIFIERS
489
+
6. CRYPTOGRAPHY
490
+
7. AUTHENTICATION
491
+
8. MERKLE SEARCH TREE
492
+
9. CAR FILES
493
+
10. BLOB HANDLING
494
+
11. ROUTING
495
+
12. PERSONAL DATA SERVER
496
+
13. WORKERS ENTRY POINT
+1283
docs/plans/2026-01-07-oauth-implementation.md
+1283
docs/plans/2026-01-07-oauth-implementation.md
···
1
+
# OAuth Implementation Plan
2
+
3
+
> **For Claude:** REQUIRED SUB-SKILL: Use superpowers:executing-plans to implement this plan task-by-task.
4
+
5
+
**Goal:** Add full AT Protocol OAuth support (PAR, DPoP, PKCE, authorization code flow) to pds.js while maintaining zero external dependencies.
6
+
7
+
**Architecture:** Extend the existing single-file pds.js with OAuth endpoints. Store authorization requests and tokens in SQLite. Use Web Crypto APIs for all cryptographic operations. Minimal server-rendered HTML for consent UI.
8
+
9
+
**Tech Stack:** JavaScript (Cloudflare Workers), SQLite (Durable Objects), Web Crypto API, P-256/ES256 signatures.
10
+
11
+
---
12
+
13
+
## Task 1: Add OAuth Database Tables
14
+
15
+
**Files:**
16
+
- Modify: `src/pds.js`
17
+
18
+
**Step 1: Add tables to initializeDatabase**
19
+
20
+
In `src/pds.js`, add to the `initializeDatabase` function after existing table creation:
21
+
22
+
```javascript
23
+
// OAuth authorization requests (from PAR)
24
+
await sql`
25
+
CREATE TABLE IF NOT EXISTS authorization_requests (
26
+
id TEXT PRIMARY KEY,
27
+
client_id TEXT NOT NULL,
28
+
client_metadata TEXT NOT NULL,
29
+
parameters TEXT NOT NULL,
30
+
code TEXT,
31
+
code_challenge TEXT,
32
+
code_challenge_method TEXT,
33
+
dpop_jkt TEXT,
34
+
did TEXT,
35
+
expires_at TEXT NOT NULL,
36
+
created_at TEXT NOT NULL
37
+
)
38
+
`;
39
+
40
+
await sql`
41
+
CREATE INDEX IF NOT EXISTS idx_authorization_requests_code
42
+
ON authorization_requests(code) WHERE code IS NOT NULL
43
+
`;
44
+
45
+
// OAuth tokens
46
+
await sql`
47
+
CREATE TABLE IF NOT EXISTS tokens (
48
+
id INTEGER PRIMARY KEY AUTOINCREMENT,
49
+
token_id TEXT UNIQUE NOT NULL,
50
+
did TEXT NOT NULL,
51
+
client_id TEXT NOT NULL,
52
+
scope TEXT,
53
+
dpop_jkt TEXT,
54
+
expires_at TEXT NOT NULL,
55
+
refresh_token TEXT UNIQUE,
56
+
created_at TEXT NOT NULL,
57
+
updated_at TEXT NOT NULL
58
+
)
59
+
`;
60
+
61
+
await sql`
62
+
CREATE INDEX IF NOT EXISTS idx_tokens_did ON tokens(did)
63
+
`;
64
+
```
65
+
66
+
**Step 2: Commit**
67
+
68
+
```bash
69
+
git add src/pds.js
70
+
git commit -m "feat(oauth): add authorization_requests and tokens tables"
71
+
```
72
+
73
+
---
74
+
75
+
## Task 2: Implement JWK Thumbprint
76
+
77
+
**Files:**
78
+
- Modify: `src/pds.js`
79
+
- Test: `test/pds.test.js`
80
+
81
+
**Step 1: Add unit test**
82
+
83
+
Add to `test/pds.test.js` imports and test:
84
+
85
+
```javascript
86
+
import {
87
+
// ... existing imports ...
88
+
computeJwkThumbprint,
89
+
} from '../src/pds.js';
90
+
91
+
describe('JWK Thumbprint', () => {
92
+
test('computes deterministic thumbprint for EC key', async () => {
93
+
// Test vector: known JWK and its expected thumbprint
94
+
const jwk = {
95
+
kty: 'EC',
96
+
crv: 'P-256',
97
+
x: 'WbbCfHGZ9QtKsVuMdPZ8hBbP2949N_CSLG3LVV0nnKY',
98
+
y: 'eSgPlDj0RVMw8t8u4MvCYG4j_JfDwvrMUUwEEHVLmqQ'
99
+
};
100
+
101
+
const jkt1 = await computeJwkThumbprint(jwk);
102
+
const jkt2 = await computeJwkThumbprint(jwk);
103
+
104
+
// Thumbprint must be deterministic
105
+
assert.strictEqual(jkt1, jkt2);
106
+
// Must be base64url-encoded SHA-256 (43 chars)
107
+
assert.strictEqual(jkt1.length, 43);
108
+
// Must only contain base64url characters
109
+
assert.match(jkt1, /^[A-Za-z0-9_-]+$/);
110
+
});
111
+
112
+
test('produces different thumbprints for different keys', async () => {
113
+
const jwk1 = { kty: 'EC', crv: 'P-256', x: 'WbbCfHGZ9QtKsVuMdPZ8hBbP2949N_CSLG3LVV0nnKY', y: 'eSgPlDj0RVMw8t8u4MvCYG4j_JfDwvrMUUwEEHVLmqQ' };
114
+
const jwk2 = { kty: 'EC', crv: 'P-256', x: 'f83OJ3D2xF1Bg8vub9tLe1gHMzV76e8Tus9uPHvRVEU', y: 'x_FEzRu9m36HLN_tue659LNpXW6pCyStikYjKIWI5a0' };
115
+
116
+
const jkt1 = await computeJwkThumbprint(jwk1);
117
+
const jkt2 = await computeJwkThumbprint(jwk2);
118
+
119
+
assert.notStrictEqual(jkt1, jkt2);
120
+
});
121
+
});
122
+
```
123
+
124
+
**Step 2: Implement and export**
125
+
126
+
Add to `src/pds.js`:
127
+
128
+
```javascript
129
+
/**
130
+
* Compute JWK thumbprint (SHA-256) per RFC 7638.
131
+
* Creates a canonical JSON representation of EC key required members
132
+
* and returns the base64url-encoded SHA-256 hash.
133
+
* @param {{ kty: string, crv: string, x: string, y: string }} jwk - The EC public key in JWK format
134
+
* @returns {Promise<string>} The base64url-encoded thumbprint
135
+
*/
136
+
export async function computeJwkThumbprint(jwk) {
137
+
// RFC 7638: members must be in lexicographic order
138
+
const thumbprintInput = JSON.stringify({
139
+
crv: jwk.crv,
140
+
kty: jwk.kty,
141
+
x: jwk.x,
142
+
y: jwk.y
143
+
});
144
+
const hash = await crypto.subtle.digest(
145
+
'SHA-256',
146
+
new TextEncoder().encode(thumbprintInput)
147
+
);
148
+
return base64UrlEncode(new Uint8Array(hash));
149
+
}
150
+
```
151
+
152
+
**Step 3: Run tests and commit**
153
+
154
+
```bash
155
+
npm test
156
+
git add src/pds.js test/pds.test.js
157
+
git commit -m "feat(oauth): implement JWK thumbprint computation"
158
+
```
159
+
160
+
---
161
+
162
+
## Task 3: Implement Client Metadata Validation
163
+
164
+
**Files:**
165
+
- Modify: `src/pds.js`
166
+
- Test: `test/pds.test.js`
167
+
168
+
**Step 1: Add unit tests**
169
+
170
+
```javascript
171
+
import {
172
+
// ... existing imports ...
173
+
isLoopbackClient,
174
+
getLoopbackClientMetadata,
175
+
validateClientMetadata,
176
+
} from '../src/pds.js';
177
+
178
+
describe('Client Metadata', () => {
179
+
test('isLoopbackClient detects localhost', () => {
180
+
assert.strictEqual(isLoopbackClient('http://localhost:8080'), true);
181
+
assert.strictEqual(isLoopbackClient('http://127.0.0.1:3000'), true);
182
+
assert.strictEqual(isLoopbackClient('https://example.com'), false);
183
+
});
184
+
185
+
test('getLoopbackClientMetadata returns permissive defaults', () => {
186
+
const metadata = getLoopbackClientMetadata('http://localhost:8080');
187
+
assert.strictEqual(metadata.client_id, 'http://localhost:8080');
188
+
assert.ok(metadata.grant_types.includes('authorization_code'));
189
+
assert.strictEqual(metadata.dpop_bound_access_tokens, true);
190
+
});
191
+
192
+
test('validateClientMetadata rejects mismatched client_id', () => {
193
+
const metadata = {
194
+
client_id: 'https://other.com/metadata.json',
195
+
redirect_uris: ['https://example.com/callback'],
196
+
grant_types: ['authorization_code'],
197
+
response_types: ['code']
198
+
};
199
+
assert.throws(
200
+
() => validateClientMetadata(metadata, 'https://example.com/metadata.json'),
201
+
/client_id mismatch/
202
+
);
203
+
});
204
+
});
205
+
```
206
+
207
+
**Step 2: Implement functions**
208
+
209
+
```javascript
210
+
/**
211
+
* Check if a client_id represents a loopback client (localhost development).
212
+
* Loopback clients are allowed without pre-registration per AT Protocol OAuth spec.
213
+
* @param {string} clientId - The client_id to check
214
+
* @returns {boolean} True if the client_id is a loopback address
215
+
*/
216
+
export function isLoopbackClient(clientId) {
217
+
try {
218
+
const url = new URL(clientId);
219
+
const host = url.hostname.toLowerCase();
220
+
return host === 'localhost' || host === '127.0.0.1' || host === '[::1]';
221
+
} catch {
222
+
return false;
223
+
}
224
+
}
225
+
226
+
/**
227
+
* @typedef {Object} ClientMetadata
228
+
* @property {string} client_id - The client identifier (must match the URL used to fetch metadata)
229
+
* @property {string} [client_name] - Human-readable client name
230
+
* @property {string[]} redirect_uris - Allowed redirect URIs
231
+
* @property {string[]} grant_types - Supported grant types
232
+
* @property {string[]} response_types - Supported response types
233
+
* @property {string} [token_endpoint_auth_method] - Token endpoint auth method
234
+
* @property {boolean} [dpop_bound_access_tokens] - Whether client requires DPoP-bound tokens
235
+
* @property {string} [scope] - Default scope
236
+
*/
237
+
238
+
/**
239
+
* Generate permissive client metadata for a loopback client.
240
+
* @param {string} clientId - The loopback client_id
241
+
* @returns {ClientMetadata} Generated client metadata
242
+
*/
243
+
export function getLoopbackClientMetadata(clientId) {
244
+
return {
245
+
client_id: clientId,
246
+
client_name: 'Loopback Client',
247
+
redirect_uris: [clientId],
248
+
grant_types: ['authorization_code', 'refresh_token'],
249
+
response_types: ['code'],
250
+
token_endpoint_auth_method: 'none',
251
+
dpop_bound_access_tokens: true,
252
+
scope: 'atproto'
253
+
};
254
+
}
255
+
256
+
/**
257
+
* Validate client metadata against AT Protocol OAuth requirements.
258
+
* @param {ClientMetadata} metadata - The client metadata to validate
259
+
* @param {string} expectedClientId - The expected client_id (the URL used to fetch metadata)
260
+
* @throws {Error} If validation fails
261
+
*/
262
+
export function validateClientMetadata(metadata, expectedClientId) {
263
+
if (!metadata.client_id) throw new Error('client_id is required');
264
+
if (metadata.client_id !== expectedClientId) throw new Error('client_id mismatch');
265
+
if (!Array.isArray(metadata.redirect_uris) || metadata.redirect_uris.length === 0) {
266
+
throw new Error('redirect_uris is required');
267
+
}
268
+
if (!metadata.grant_types?.includes('authorization_code')) {
269
+
throw new Error('grant_types must include authorization_code');
270
+
}
271
+
}
272
+
273
+
/** @type {Map<string, { metadata: ClientMetadata, expiresAt: number }>} */
274
+
const clientMetadataCache = new Map();
275
+
276
+
/**
277
+
* Fetch and validate client metadata from a client_id URL.
278
+
* Caches results for 10 minutes. Loopback clients return synthetic metadata.
279
+
* @param {string} clientId - The client_id (URL to fetch metadata from)
280
+
* @returns {Promise<ClientMetadata>} The validated client metadata
281
+
* @throws {Error} If fetching or validation fails
282
+
*/
283
+
async function getClientMetadata(clientId) {
284
+
const cached = clientMetadataCache.get(clientId);
285
+
if (cached && Date.now() < cached.expiresAt) return cached.metadata;
286
+
287
+
if (isLoopbackClient(clientId)) {
288
+
const metadata = getLoopbackClientMetadata(clientId);
289
+
clientMetadataCache.set(clientId, { metadata, expiresAt: Date.now() + 600000 });
290
+
return metadata;
291
+
}
292
+
293
+
const response = await fetch(clientId, { headers: { 'Accept': 'application/json' } });
294
+
if (!response.ok) throw new Error(`Failed to fetch client metadata: ${response.status}`);
295
+
296
+
const metadata = await response.json();
297
+
validateClientMetadata(metadata, clientId);
298
+
clientMetadataCache.set(clientId, { metadata, expiresAt: Date.now() + 600000 });
299
+
return metadata;
300
+
}
301
+
```
302
+
303
+
**Step 3: Run tests and commit**
304
+
305
+
```bash
306
+
npm test
307
+
git add src/pds.js test/pds.test.js
308
+
git commit -m "feat(oauth): implement client metadata fetching and validation"
309
+
```
310
+
311
+
---
312
+
313
+
## Task 4: Implement DPoP Proof Parsing
314
+
315
+
**Files:**
316
+
- Modify: `src/pds.js`
317
+
318
+
**Step 1: Implement parseDpopProof**
319
+
320
+
```javascript
321
+
/**
 * @typedef {Object} DpopProofResult
 * @property {string} jkt - The JWK thumbprint of the DPoP key
 * @property {string} jti - The unique identifier from the DPoP proof
 * @property {{ kty: string, crv: string, x: string, y: string }} jwk - The public key from the proof
 */

/**
 * Parse and validate a DPoP proof JWT (RFC 9449).
 * Verifies the signature, checks claims (htm, htu, iat, jti), and optionally
 * validates key binding (expectedJkt) and access token hash (ath).
 * @param {string} proof - The DPoP proof JWT
 * @param {string} method - The expected HTTP method (htm claim)
 * @param {string} url - The expected request URL (htu claim)
 * @param {string|null} [expectedJkt=null] - If provided, verify the key matches this thumbprint
 * @param {string|null} [accessToken=null] - If provided, verify the ath claim matches this token's hash
 * @returns {Promise<DpopProofResult>} The parsed proof with jkt, jti, and jwk
 * @throws {Error} If validation fails
 */
async function parseDpopProof(proof, method, url, expectedJkt = null, accessToken = null) {
  const parts = proof.split('.');
  if (parts.length !== 3) throw new Error('Invalid DPoP proof format');

  const header = JSON.parse(new TextDecoder().decode(base64UrlDecode(parts[0])));
  const payload = JSON.parse(new TextDecoder().decode(base64UrlDecode(parts[1])));

  // The proof must be a dpop+jwt signed with ES256 and carry its own EC
  // public key in the header (self-contained proof-of-possession).
  if (header.typ !== 'dpop+jwt') throw new Error('DPoP proof must have typ dpop+jwt');
  if (header.alg !== 'ES256') throw new Error('DPoP proof must use ES256');
  if (!header.jwk || header.jwk.kty !== 'EC') throw new Error('DPoP proof must contain EC key');

  // Verify the signature with the embedded key itself; binding to a
  // previously-seen key happens below via the jkt thumbprint comparison.
  const publicKey = await crypto.subtle.importKey(
    'jwk', header.jwk,
    { name: 'ECDSA', namedCurve: 'P-256' },
    false, ['verify']
  );

  const signatureInput = new TextEncoder().encode(parts[0] + '.' + parts[1]);
  const signature = base64UrlDecode(parts[2]);
  // Web Crypto expects DER-encoded ECDSA signatures; JWS ES256 uses raw r||s.
  const derSignature = compactSignatureToDer(signature);

  const valid = await crypto.subtle.verify(
    { name: 'ECDSA', hash: 'SHA-256' },
    publicKey, derSignature, signatureInput
  );
  if (!valid) throw new Error('DPoP proof signature invalid');

  // Validate claims
  if (payload.htm !== method) throw new Error('DPoP htm mismatch');

  // Compare htu ignoring query string and a trailing slash.
  // NOTE(review): lowercasing the whole URL also case-folds the path, which
  // is looser than RFC 9449's comparison rules — confirm this is intended.
  const normalizeUrl = (u) => u.replace(/\/$/, '').split('?')[0].toLowerCase();
  if (normalizeUrl(payload.htu) !== normalizeUrl(url)) throw new Error('DPoP htu mismatch');

  // Accept proofs issued up to 60s in the future (clock skew) and no more
  // than 300s in the past.
  const now = Math.floor(Date.now() / 1000);
  if (!payload.iat || payload.iat > now + 60 || payload.iat < now - 300) {
    throw new Error('DPoP proof expired or invalid iat');
  }

  // NOTE(review): jti is required but not tracked server-side, so proof
  // replay within the iat window is not detected — confirm acceptable.
  if (!payload.jti) throw new Error('DPoP proof missing jti');

  const jkt = await computeJwkThumbprint(header.jwk);
  if (expectedJkt && jkt !== expectedJkt) throw new Error('DPoP key mismatch');

  // When presented alongside an access token, the proof must hash-bind to it
  // via the ath claim (base64url of SHA-256 of the token).
  if (accessToken) {
    const tokenHash = await crypto.subtle.digest('SHA-256', new TextEncoder().encode(accessToken));
    const expectedAth = base64UrlEncode(new Uint8Array(tokenHash));
    if (payload.ath !== expectedAth) throw new Error('DPoP ath mismatch');
  }

  return { jkt, jti: payload.jti, jwk: header.jwk };
}
392
+
393
+
/**
 * Convert a compact (r||s) ECDSA signature to DER format for Web Crypto API.
 * @param {Uint8Array} compact - The 64-byte compact signature (32 bytes r + 32 bytes s)
 * @returns {Uint8Array} The DER-encoded signature
 */
function compactSignatureToDer(compact) {
  /**
   * DER-encode one unsigned big-endian INTEGER: strip redundant leading zero
   * bytes, then prepend 0x00 when the high bit is set so the value stays
   * positive in DER's two's-complement interpretation.
   * @param {Uint8Array} bytes
   * @returns {Uint8Array}
   */
  const encodeInt = (bytes) => {
    let start = 0;
    while (start < bytes.length - 1 && bytes[start] === 0 && (bytes[start + 1] & 0x80) === 0) {
      start += 1;
    }
    const magnitude = bytes.slice(start);
    const needsPad = (magnitude[0] & 0x80) !== 0;
    const body = needsPad ? [0, ...magnitude] : [...magnitude];
    return new Uint8Array([0x02, body.length, ...body]);
  };

  const rDer = encodeInt(compact.slice(0, 32));
  const sDer = encodeInt(compact.slice(32, 64));
  // SEQUENCE of the two INTEGERs; total length always < 128 for P-256,
  // so the short-form length byte suffices.
  return new Uint8Array([0x30, rDer.length + sDer.length, ...rDer, ...sDer]);
}
418
+
```
419
+
420
+
**Step 2: Commit**
421
+
422
+
```bash
423
+
git add src/pds.js
424
+
git commit -m "feat(oauth): implement DPoP proof parsing"
425
+
```
426
+
427
+
---
428
+
429
+
## Task 5: Add OAuth Discovery Endpoints
430
+
431
+
**Files:**
432
+
- Modify: `src/pds.js`
433
+
434
+
**Step 1: Add endpoints to handleRequest**
435
+
436
+
```javascript
437
+
// OAuth Authorization Server Metadata
438
+
if (path === '/.well-known/oauth-authorization-server' && method === 'GET') {
439
+
const issuer = `${url.protocol}//${url.host}`;
440
+
return json({
441
+
issuer,
442
+
authorization_endpoint: `${issuer}/oauth/authorize`,
443
+
token_endpoint: `${issuer}/oauth/token`,
444
+
revocation_endpoint: `${issuer}/oauth/revoke`,
445
+
pushed_authorization_request_endpoint: `${issuer}/oauth/par`,
446
+
jwks_uri: `${issuer}/oauth/jwks`,
447
+
scopes_supported: ['atproto'],
448
+
response_types_supported: ['code'],
449
+
grant_types_supported: ['authorization_code', 'refresh_token'],
450
+
code_challenge_methods_supported: ['S256'],
451
+
token_endpoint_auth_methods_supported: ['none'],
452
+
dpop_signing_alg_values_supported: ['ES256'],
453
+
require_pushed_authorization_requests: true,
454
+
authorization_response_iss_parameter_supported: true
455
+
});
456
+
}
457
+
458
+
// OAuth Protected Resource Metadata
459
+
if (path === '/.well-known/oauth-protected-resource' && method === 'GET') {
460
+
const resource = `${url.protocol}//${url.host}`;
461
+
return json({
462
+
resource,
463
+
authorization_servers: [resource],
464
+
bearer_methods_supported: ['header'],
465
+
scopes_supported: ['atproto']
466
+
});
467
+
}
468
+
469
+
// JWKS endpoint
470
+
if (path === '/oauth/jwks' && method === 'GET') {
471
+
const publicKeyJwk = await getPublicKeyJwk(this);
472
+
return json({
473
+
keys: [{ ...publicKeyJwk, kid: 'pds-oauth-key', use: 'sig', alg: 'ES256' }]
474
+
});
475
+
}
476
+
```
477
+
478
+
**Step 2: Add getPublicKeyJwk helper**
479
+
480
+
```javascript
481
+
/**
482
+
* Get the PDS signing key as a public JWK.
483
+
* Exports only the public components (kty, crv, x, y) for use in JWKS.
484
+
* @param {{ storage: DurableObjectStorage }} pds - The PDS instance
485
+
* @returns {Promise<{ kty: string, crv: string, x: string, y: string }>} The public key in JWK format
486
+
* @throws {Error} If the PDS is not initialized
487
+
*/
488
+
async function getPublicKeyJwk(pds) {
489
+
const privateKeyHex = await pds.storage.get('privateKey');
490
+
if (!privateKeyHex) throw new Error('PDS not initialized');
491
+
492
+
const privateKeyBytes = hexToBytes(privateKeyHex);
493
+
const privateKey = await crypto.subtle.importKey(
494
+
'pkcs8', privateKeyBytes,
495
+
{ name: 'ECDSA', namedCurve: 'P-256' }, true, ['sign']
496
+
);
497
+
const jwk = await crypto.subtle.exportKey('jwk', privateKey);
498
+
return { kty: jwk.kty, crv: jwk.crv, x: jwk.x, y: jwk.y };
499
+
}
500
+
```
501
+
502
+
**Step 3: Commit**
503
+
504
+
```bash
505
+
git add src/pds.js
506
+
git commit -m "feat(oauth): add discovery endpoints"
507
+
```
508
+
509
+
---
510
+
511
+
## Task 6: Implement PAR Endpoint
512
+
513
+
**Files:**
514
+
- Modify: `src/pds.js`
515
+
516
+
**Step 1: Add PAR handler**
517
+
518
+
```javascript
519
+
if (path === '/oauth/par' && method === 'POST') {
520
+
return handlePar(request, url, this, env);
521
+
}
522
+
523
+
/**
 * Handle Pushed Authorization Request (PAR) endpoint.
 * Validates DPoP proof, client metadata, PKCE parameters, and stores the authorization request.
 * @param {Request} request - The incoming request
 * @param {URL} url - Parsed request URL
 * @param {{ storage: DurableObjectStorage }} pds - The PDS instance
 * @param {{ PDS_PASSWORD: string }} env - Environment variables
 * @returns {Promise<Response>} JSON response with request_uri and expires_in
 */
async function handlePar(request, url, pds, env) {
  const issuer = `${url.protocol}//${url.host}`;

  // The DPoP proof presented here binds the eventual tokens to the client's
  // key: its thumbprint (jkt) is stored with the request and re-checked at
  // token exchange time.
  const dpopHeader = request.headers.get('DPoP');
  if (!dpopHeader) {
    return json({ error: 'invalid_dpop_proof', error_description: 'DPoP proof required' }, 400);
  }

  let dpop;
  try {
    dpop = await parseDpopProof(dpopHeader, 'POST', `${issuer}/oauth/par`);
  } catch (err) {
    return json({ error: 'invalid_dpop_proof', error_description: err.message }, 400);
  }

  const body = await request.text();
  const params = new URLSearchParams(body);

  const clientId = params.get('client_id');
  const redirectUri = params.get('redirect_uri');
  const responseType = params.get('response_type');
  const scope = params.get('scope');
  const state = params.get('state');
  const codeChallenge = params.get('code_challenge');
  const codeChallengeMethod = params.get('code_challenge_method');

  if (!clientId) return json({ error: 'invalid_request', error_description: 'client_id required' }, 400);
  if (!redirectUri) return json({ error: 'invalid_request', error_description: 'redirect_uri required' }, 400);
  if (responseType !== 'code') return json({ error: 'unsupported_response_type' }, 400);
  // PKCE is mandatory, and only S256 is accepted (no "plain").
  if (!codeChallenge || codeChallengeMethod !== 'S256') {
    return json({ error: 'invalid_request', error_description: 'PKCE with S256 required' }, 400);
  }

  // Resolve and validate the client's published metadata document; any
  // failure is reported as invalid_client.
  let clientMetadata;
  try {
    clientMetadata = await getClientMetadata(clientId);
  } catch (err) {
    return json({ error: 'invalid_client', error_description: err.message }, 400);
  }

  // NOTE(review): redirect_uri is not checked against
  // clientMetadata.redirect_uris here — confirm it is validated before the
  // authorization redirect is actually issued.
  const requestId = crypto.randomUUID();
  const requestUri = `urn:ietf:params:oauth:request_uri:${requestId}`;
  const expiresIn = 600;
  const expiresAt = new Date(Date.now() + expiresIn * 1000).toISOString();

  // Persist the pending request keyed by the URN id; the authorize endpoint
  // looks it up by this id plus client_id.
  const sql = createSql(pds.storage);
  await sql`
    INSERT INTO authorization_requests (
      id, client_id, client_metadata, parameters,
      code_challenge, code_challenge_method, dpop_jkt,
      expires_at, created_at
    ) VALUES (
      ${requestId}, ${clientId}, ${JSON.stringify(clientMetadata)},
      ${JSON.stringify({ redirect_uri: redirectUri, scope, state })},
      ${codeChallenge}, ${codeChallengeMethod}, ${dpop.jkt},
      ${expiresAt}, ${new Date().toISOString()}
    )
  `;

  return json({ request_uri: requestUri, expires_in: expiresIn });
}
593
+
```
594
+
595
+
**Step 2: Commit**
596
+
597
+
```bash
598
+
git add src/pds.js
599
+
git commit -m "feat(oauth): implement PAR endpoint"
600
+
```
601
+
602
+
---
603
+
604
+
## Task 7: Implement Authorization Endpoint
605
+
606
+
**Files:**
607
+
- Modify: `src/pds.js`
608
+
609
+
**Step 1: Add GET handler (consent UI)**
610
+
611
+
```javascript
612
+
if (path === '/oauth/authorize' && method === 'GET') {
613
+
return handleAuthorizeGet(request, url, this, env);
614
+
}
615
+
616
+
/**
 * Handle GET /oauth/authorize - displays the consent UI.
 * Validates the request_uri from PAR and renders a login/consent form.
 * @param {Request} request - The incoming request
 * @param {URL} url - Parsed request URL
 * @param {{ storage: DurableObjectStorage }} pds - The PDS instance
 * @param {{ PDS_PASSWORD: string }} env - Environment variables
 * @returns {Promise<Response>} HTML consent page
 */
async function handleAuthorizeGet(request, url, pds, env) {
  const requestUri = url.searchParams.get('request_uri');
  const clientId = url.searchParams.get('client_id');
  if (!requestUri || !clientId) {
    return new Response('Missing parameters', { status: 400 });
  }

  // request_uri is the URN minted by the PAR endpoint; extract the record id.
  const idMatch = requestUri.match(/^urn:ietf:params:oauth:request_uri:(.+)$/);
  if (!idMatch) {
    return new Response('Invalid request_uri', { status: 400 });
  }

  const sql = createSql(pds.storage);
  const rows = await sql`
    SELECT * FROM authorization_requests WHERE id = ${idMatch[1]} AND client_id = ${clientId}
  `;
  const authRequest = rows[0];

  // Reject missing, expired, and already-consumed requests up front.
  if (!authRequest) {
    return new Response('Request not found', { status: 400 });
  }
  if (new Date(authRequest.expires_at) < new Date()) {
    return new Response('Request expired', { status: 400 });
  }
  if (authRequest.code) {
    return new Response('Request already used', { status: 400 });
  }

  const clientMetadata = JSON.parse(authRequest.client_metadata);
  const parameters = JSON.parse(authRequest.parameters);

  const html = renderConsentPage({
    clientName: clientMetadata.client_name || clientId,
    clientId,
    scope: parameters.scope || 'atproto',
    requestUri,
  });
  return new Response(html, { status: 200, headers: { 'Content-Type': 'text/html; charset=utf-8' } });
}
651
+
652
+
/**
 * Render the OAuth consent page HTML.
 * All interpolated values are HTML-escaped, since client_name, scope, and
 * request_uri ultimately originate from the (untrusted) OAuth client.
 * @param {{ clientName: string, clientId: string, scope: string, requestUri: string, error?: string }} params
 * @returns {string} HTML page content
 */
function renderConsentPage({ clientName, clientId, scope, requestUri, error = '' }) {
  // Escape HTML metacharacters. & must be replaced first so the entities
  // produced by the later replacements are not double-escaped.
  /** @param {string} s */
  const escHtml = s => s
    .replace(/&/g, '&amp;')
    .replace(/</g, '&lt;')
    .replace(/>/g, '&gt;')
    .replace(/"/g, '&quot;');
  return `<!DOCTYPE html>
<html><head><meta charset="utf-8"><meta name="viewport" content="width=device-width,initial-scale=1">
<title>Authorize</title>
<style>body{font-family:system-ui;max-width:400px;margin:40px auto;padding:20px}
.error{color:#c00;background:#fee;padding:10px;margin:10px 0}
button{padding:10px 20px;margin:5px;cursor:pointer}
.approve{background:#06c;color:#fff;border:none}
input{width:100%;padding:8px;margin:5px 0;box-sizing:border-box}</style></head>
<body><h2>Sign in to authorize</h2>
<p><b>${escHtml(clientName)}</b> wants to access your account.</p>
<p>Scope: ${escHtml(scope)}</p>
${error ? `<p class="error">${escHtml(error)}</p>` : ''}
<form method="POST" action="/oauth/authorize">
<input type="hidden" name="request_uri" value="${escHtml(requestUri)}">
<input type="hidden" name="client_id" value="${escHtml(clientId)}">
<label>Password</label><input type="password" name="password" required autofocus>
<div><button type="submit" name="action" value="deny">Deny</button>
<button type="submit" name="action" value="approve" class="approve">Authorize</button></div>
</form></body></html>`;
}
680
+
```
681
+
682
+
**Step 2: Add POST handler (approval)**
683
+
684
+
```javascript
685
+
if (path === '/oauth/authorize' && method === 'POST') {
686
+
return handleAuthorizePost(request, url, this, env);
687
+
}
688
+
689
+
/**
 * Handle POST /oauth/authorize - processes user approval/denial.
 * Validates password, generates authorization code on approval, redirects to client.
 * @param {Request} request - The incoming request
 * @param {URL} url - Parsed request URL
 * @param {{ storage: DurableObjectStorage }} pds - The PDS instance
 * @param {{ PDS_PASSWORD: string }} env - Environment variables
 * @returns {Promise<Response>} Redirect to client redirect_uri with code or error
 */
async function handleAuthorizePost(request, url, pds, env) {
  const issuer = `${url.protocol}//${url.host}`;
  const body = await request.text();
  const params = new URLSearchParams(body);

  const requestUri = params.get('request_uri');
  const clientId = params.get('client_id');
  const password = params.get('password');
  const action = params.get('action');

  const match = requestUri?.match(/^urn:ietf:params:oauth:request_uri:(.+)$/);
  if (!match) return new Response('Invalid request_uri', { status: 400 });

  // NOTE(review): unlike the GET handler, expires_at and an existing code are
  // not re-checked here — confirm whether expired/used requests should be
  // rejected on POST as well.
  const sql = createSql(pds.storage);
  const [authRequest] = await sql`
    SELECT * FROM authorization_requests WHERE id = ${match[1]} AND client_id = ${clientId}
  `;
  if (!authRequest) return new Response('Request not found', { status: 400 });

  const clientMetadata = JSON.parse(authRequest.client_metadata);
  const parameters = JSON.parse(authRequest.parameters);

  // Denial: discard the pending request and redirect back to the client with
  // access_denied, echoing state and the iss parameter (RFC 9207).
  if (action === 'deny') {
    await sql`DELETE FROM authorization_requests WHERE id = ${match[1]}`;
    const errorUrl = new URL(parameters.redirect_uri);
    errorUrl.searchParams.set('error', 'access_denied');
    if (parameters.state) errorUrl.searchParams.set('state', parameters.state);
    errorUrl.searchParams.set('iss', issuer);
    return Response.redirect(errorUrl.toString(), 302);
  }

  // Single-user PDS: consent is gated on the account password.
  // NOTE(review): non-constant-time string comparison — consider a
  // timing-safe compare for the password check.
  if (password !== env.PDS_PASSWORD) {
    return new Response(renderConsentPage({
      clientName: clientMetadata.client_name || clientId,
      clientId, scope: parameters.scope || 'atproto', requestUri, error: 'Invalid password'
    }), { status: 200, headers: { 'Content-Type': 'text/html; charset=utf-8' } });
  }

  // Approval: mint a single-use authorization code, attach it (plus the
  // account DID) to the stored request, and redirect back to the client.
  // The token endpoint later consumes the request by this code.
  const code = base64UrlEncode(crypto.getRandomValues(new Uint8Array(32)));
  const did = await pds.storage.get('did');

  await sql`UPDATE authorization_requests SET code = ${code}, did = ${did} WHERE id = ${match[1]}`;

  const successUrl = new URL(parameters.redirect_uri);
  successUrl.searchParams.set('code', code);
  if (parameters.state) successUrl.searchParams.set('state', parameters.state);
  successUrl.searchParams.set('iss', issuer);
  return Response.redirect(successUrl.toString(), 302);
}
747
+
```
748
+
749
+
**Step 3: Commit**
750
+
751
+
```bash
752
+
git add src/pds.js
753
+
git commit -m "feat(oauth): implement authorization endpoint with consent UI"
754
+
```
755
+
756
+
---
757
+
758
+
## Task 8: Implement Token Endpoint
759
+
760
+
**Files:**
761
+
- Modify: `src/pds.js`
762
+
763
+
**Step 1: Add token handler**
764
+
765
+
```javascript
766
+
if (path === '/oauth/token' && method === 'POST') {
767
+
return handleToken(request, url, this, env);
768
+
}
769
+
770
+
/**
 * Handle token endpoint - exchanges authorization codes for tokens.
 * Supports authorization_code and refresh_token grant types.
 * @param {Request} request - The incoming request
 * @param {URL} url - Parsed request URL
 * @param {{ storage: DurableObjectStorage }} pds - The PDS instance
 * @param {{ PDS_PASSWORD: string }} env - Environment variables
 * @returns {Promise<Response>} JSON response with access_token, token_type, expires_in, refresh_token, scope
 */
async function handleToken(request, url, pds, env) {
  const issuer = `${url.protocol}//${url.host}`;

  // Every token request must carry a DPoP proof; the grant handlers compare
  // its thumbprint against the one bound at PAR / issuance time.
  const dpopHeader = request.headers.get('DPoP');
  if (!dpopHeader) {
    return json({ error: 'invalid_dpop_proof', error_description: 'DPoP required' }, 400);
  }

  let dpop;
  try {
    dpop = await parseDpopProof(dpopHeader, 'POST', `${issuer}/oauth/token`);
  } catch (err) {
    return json({ error: 'invalid_dpop_proof', error_description: err.message }, 400);
  }

  const params = new URLSearchParams(await request.text());

  // Dispatch on grant type; anything else is unsupported.
  switch (params.get('grant_type')) {
    case 'authorization_code':
      return handleAuthCodeGrant(params, dpop, issuer, pds);
    case 'refresh_token':
      return handleRefreshGrant(params, dpop, issuer, pds);
    default:
      return json({ error: 'unsupported_grant_type' }, 400);
  }
}
803
+
804
+
/**
 * Handle authorization_code grant type.
 * Validates the code, its expiry, the PKCE verifier, and the DPoP binding,
 * then issues an access token and refresh token.
 * @param {URLSearchParams} params - Token request parameters
 * @param {DpopProofResult} dpop - Parsed DPoP proof
 * @param {string} issuer - The PDS issuer URL
 * @param {{ storage: DurableObjectStorage }} pds - The PDS instance
 * @returns {Promise<Response>} JSON token response
 */
async function handleAuthCodeGrant(params, dpop, issuer, pds) {
  const code = params.get('code');
  const redirectUri = params.get('redirect_uri');
  const clientId = params.get('client_id');
  const codeVerifier = params.get('code_verifier');

  if (!code || !redirectUri || !clientId || !codeVerifier) {
    return json({ error: 'invalid_request' }, 400);
  }

  const sql = createSql(pds.storage);
  const [authRequest] = await sql`SELECT * FROM authorization_requests WHERE code = ${code}`;
  if (!authRequest) return json({ error: 'invalid_grant', error_description: 'Invalid code' }, 400);
  if (authRequest.client_id !== clientId) return json({ error: 'invalid_grant' }, 400);
  // The token request must be signed by the same DPoP key bound at PAR time.
  if (authRequest.dpop_jkt !== dpop.jkt) return json({ error: 'invalid_dpop_proof' }, 400);

  // Reject codes whose underlying authorization request has expired; without
  // this check an approved-but-unredeemed code would stay valid forever
  // (expiry was previously only enforced on the consent page GET).
  if (new Date(authRequest.expires_at) < new Date()) {
    await sql`DELETE FROM authorization_requests WHERE id = ${authRequest.id}`;
    return json({ error: 'invalid_grant', error_description: 'Code expired' }, 400);
  }

  const parameters = JSON.parse(authRequest.parameters);
  if (parameters.redirect_uri !== redirectUri) return json({ error: 'invalid_grant' }, 400);

  // Verify PKCE: base64url(SHA-256(code_verifier)) must equal the challenge
  // registered at PAR time (S256 only).
  const challengeHash = await crypto.subtle.digest('SHA-256', new TextEncoder().encode(codeVerifier));
  const computedChallenge = base64UrlEncode(new Uint8Array(challengeHash));
  if (computedChallenge !== authRequest.code_challenge) {
    return json({ error: 'invalid_grant', error_description: 'Invalid code_verifier' }, 400);
  }

  // Single use: consume the authorization request before issuing tokens.
  await sql`DELETE FROM authorization_requests WHERE id = ${authRequest.id}`;

  const tokenId = crypto.randomUUID();
  const refreshToken = base64UrlEncode(crypto.getRandomValues(new Uint8Array(32)));
  const scope = parameters.scope || 'atproto';
  const now = new Date();
  const expiresIn = 3600;

  const accessToken = await createOAuthAccessToken({
    issuer, subject: authRequest.did, clientId, scope, tokenId, dpopJkt: dpop.jkt, expiresIn
  }, pds);

  await sql`
    INSERT INTO tokens (token_id, did, client_id, scope, dpop_jkt, expires_at, refresh_token, created_at, updated_at)
    VALUES (${tokenId}, ${authRequest.did}, ${clientId}, ${scope}, ${dpop.jkt},
      ${new Date(now.getTime() + expiresIn * 1000).toISOString()},
      ${refreshToken}, ${now.toISOString()}, ${now.toISOString()})
  `;

  return json({ access_token: accessToken, token_type: 'DPoP', expires_in: expiresIn, refresh_token: refreshToken, scope });
}
860
+
861
+
/**
 * @typedef {Object} AccessTokenParams
 * @property {string} issuer - The PDS issuer URL
 * @property {string} subject - The DID of the authenticated user
 * @property {string} clientId - The OAuth client_id
 * @property {string} scope - The granted scope
 * @property {string} tokenId - Unique token identifier (jti)
 * @property {string} dpopJkt - The DPoP key thumbprint for token binding
 * @property {number} expiresIn - Token lifetime in seconds
 */

/**
 * Create a DPoP-bound access token (at+jwt).
 * @param {AccessTokenParams} params - Token parameters
 * @param {{ storage: DurableObjectStorage }} pds - The PDS instance
 * @returns {Promise<string>} The signed JWT access token
 */
async function createOAuthAccessToken({ issuer, subject, clientId, scope, tokenId, dpopJkt, expiresIn }, pds) {
  const issuedAt = Math.floor(Date.now() / 1000);

  // Serialize one JWT segment: JSON -> UTF-8 -> base64url.
  const encodeSegment = (obj) => base64UrlEncode(new TextEncoder().encode(JSON.stringify(obj)));

  const headerB64 = encodeSegment({ typ: 'at+jwt', alg: 'ES256', kid: 'pds-oauth-key' });
  // cnf.jkt binds the token to the client's DPoP key (RFC 9449).
  const payloadB64 = encodeSegment({
    iss: issuer, sub: subject, aud: issuer, client_id: clientId,
    scope, jti: tokenId, iat: issuedAt, exp: issuedAt + expiresIn, cnf: { jkt: dpopJkt }
  });

  // Sign with the PDS signing key stored in Durable Object storage.
  const privateKeyHex = await pds.storage.get('privateKey');
  const signingKey = await importPrivateKey(hexToBytes(privateKeyHex));
  const signature = await sign(signingKey, new TextEncoder().encode(`${headerB64}.${payloadB64}`));

  return `${headerB64}.${payloadB64}.${base64UrlEncode(signature)}`;
}
896
+
```
897
+
898
+
**Step 2: Commit**
899
+
900
+
```bash
901
+
git add src/pds.js
902
+
git commit -m "feat(oauth): implement token endpoint"
903
+
```
904
+
905
+
---
906
+
907
+
## Task 9: Implement Refresh Token Grant
908
+
909
+
**Files:**
910
+
- Modify: `src/pds.js`
911
+
912
+
**Step 1: Add refresh handler**
913
+
914
+
```javascript
915
+
/**
 * Handle refresh_token grant type.
 * Validates the refresh token, DPoP binding, and 24hr lifetime, then rotates tokens.
 * @param {URLSearchParams} params - Token request parameters
 * @param {DpopProofResult} dpop - Parsed DPoP proof
 * @param {string} issuer - The PDS issuer URL
 * @param {{ storage: DurableObjectStorage }} pds - The PDS instance
 * @returns {Promise<Response>} JSON token response with new access and refresh tokens
 */
async function handleRefreshGrant(params, dpop, issuer, pds) {
  const refreshToken = params.get('refresh_token');
  const clientId = params.get('client_id');

  if (!refreshToken || !clientId) return json({ error: 'invalid_request' }, 400);

  const sql = createSql(pds.storage);
  const [token] = await sql`SELECT * FROM tokens WHERE refresh_token = ${refreshToken}`;

  // The refresh token must belong to the same client and the same DPoP key
  // that obtained the original grant.
  if (!token) return json({ error: 'invalid_grant', error_description: 'Invalid refresh token' }, 400);
  if (token.client_id !== clientId) return json({ error: 'invalid_grant' }, 400);
  if (token.dpop_jkt !== dpop.jkt) return json({ error: 'invalid_dpop_proof' }, 400);

  // Check 24hr lifetime. created_at is not touched by the rotation UPDATE
  // below, so this is an absolute lifetime from the original authorization,
  // not a sliding window that refreshing would extend.
  const createdAt = new Date(token.created_at);
  if (Date.now() - createdAt.getTime() > 24 * 60 * 60 * 1000) {
    await sql`DELETE FROM tokens WHERE id = ${token.id}`;
    return json({ error: 'invalid_grant', error_description: 'Refresh token expired' }, 400);
  }

  // Rotate: mint a fresh access token (new jti) and replace the refresh
  // token in place, which invalidates the one just presented.
  const newTokenId = crypto.randomUUID();
  const newRefreshToken = base64UrlEncode(crypto.getRandomValues(new Uint8Array(32)));
  const now = new Date();
  const expiresIn = 3600;

  const accessToken = await createOAuthAccessToken({
    issuer, subject: token.did, clientId, scope: token.scope,
    tokenId: newTokenId, dpopJkt: dpop.jkt, expiresIn
  }, pds);

  await sql`
    UPDATE tokens SET token_id = ${newTokenId}, refresh_token = ${newRefreshToken},
    expires_at = ${new Date(now.getTime() + expiresIn * 1000).toISOString()},
    updated_at = ${now.toISOString()} WHERE id = ${token.id}
  `;

  return json({ access_token: accessToken, token_type: 'DPoP', expires_in: expiresIn, refresh_token: newRefreshToken, scope: token.scope });
}
962
+
```
963
+
964
+
**Step 2: Commit**
965
+
966
+
```bash
967
+
git add src/pds.js
968
+
git commit -m "feat(oauth): implement refresh token grant"
969
+
```
970
+
971
+
---
972
+
973
+
## Task 10: Update requireAuth for DPoP Tokens
974
+
975
+
**Files:**
976
+
- Modify: `src/pds.js`
977
+
978
+
**Step 1: Update requireAuth**
979
+
980
+
```javascript
981
+
/**
 * @typedef {Object} AuthResult
 * @property {string} did - The authenticated user's DID
 * @property {string} [scope] - The granted scope (for OAuth tokens)
 */

/**
 * Require authentication for a request.
 * Supports both legacy Bearer tokens (JWT with symmetric key) and OAuth DPoP tokens.
 * @param {Request} request - The incoming request
 * @param {{ JWT_SECRET: string, PDS_PASSWORD: string }} env - Environment variables
 * @param {{ storage: DurableObjectStorage }} pds - The PDS instance
 * @returns {Promise<AuthResult>} The authenticated user's DID and scope
 * @throws {AuthRequiredError} If authentication fails
 */
async function requireAuth(request, env, pds) {
  const header = request.headers.get('Authorization');
  if (!header) {
    throw new AuthRequiredError('Authorization required');
  }

  const bearerPrefix = 'Bearer ';
  const dpopPrefix = 'DPoP ';

  if (header.startsWith(bearerPrefix)) {
    // Legacy session tokens: symmetric JWT verified against JWT_SECRET.
    return verifyAccessJwt(header.slice(bearerPrefix.length), env.JWT_SECRET);
  }
  if (header.startsWith(dpopPrefix)) {
    // OAuth tokens: asymmetric at+jwt bound to the client's DPoP key.
    return verifyOAuthAccessToken(request, header.slice(dpopPrefix.length), pds);
  }

  throw new AuthRequiredError('Invalid authorization type');
}
1010
+
1011
+
/**
 * Verify an OAuth DPoP-bound access token.
 * Validates the JWT signature, expiration, DPoP binding, and proof.
 * @param {Request} request - The incoming request (for DPoP validation)
 * @param {string} token - The access token JWT
 * @param {{ storage: DurableObjectStorage }} pds - The PDS instance
 * @returns {Promise<AuthResult>} The authenticated user's DID and scope
 * @throws {AuthRequiredError} If verification fails
 */
async function verifyOAuthAccessToken(request, token, pds) {
  const parts = token.split('.');
  if (parts.length !== 3) throw new AuthRequiredError('Invalid token format');

  // Only at+jwt access tokens are accepted on this path.
  const header = JSON.parse(new TextDecoder().decode(base64UrlDecode(parts[0])));
  if (header.typ !== 'at+jwt') throw new AuthRequiredError('Invalid token type');

  // Verify signature with PDS public key (the same key published at /oauth/jwks).
  const publicKeyJwk = await getPublicKeyJwk(pds);
  const publicKey = await crypto.subtle.importKey(
    'jwk', publicKeyJwk, { name: 'ECDSA', namedCurve: 'P-256' }, false, ['verify']
  );

  const signatureInput = new TextEncoder().encode(parts[0] + '.' + parts[1]);
  const signature = base64UrlDecode(parts[2]);

  const valid = await crypto.subtle.verify(
    { name: 'ECDSA', hash: 'SHA-256' }, publicKey,
    compactSignatureToDer(signature), signatureInput
  );
  if (!valid) throw new AuthRequiredError('Invalid token signature');

  // Claims are only trusted after the signature check above.
  const payload = JSON.parse(new TextDecoder().decode(base64UrlDecode(parts[1])));

  if (payload.exp && payload.exp < Math.floor(Date.now() / 1000)) {
    throw new AuthRequiredError('Token expired');
  }

  if (!payload.cnf?.jkt) throw new AuthRequiredError('Token missing DPoP binding');

  // The caller must present a DPoP proof for this exact request, signed by
  // the key the token is bound to (cnf.jkt) and hash-bound to the token
  // itself via the ath claim.
  const dpopHeader = request.headers.get('DPoP');
  if (!dpopHeader) throw new AuthRequiredError('DPoP proof required');

  const url = new URL(request.url);
  await parseDpopProof(dpopHeader, request.method, `${url.protocol}//${url.host}${url.pathname}`, payload.cnf.jkt, token);

  // NOTE(review): the tokens table is not consulted here, so revocation via
  // /oauth/revoke does not invalidate a live access token before its exp —
  // confirm this trade-off is intended.
  return { did: payload.sub, scope: payload.scope };
}
1058
+
```
1059
+
1060
+
**Step 2: Commit**
1061
+
1062
+
```bash
1063
+
git add src/pds.js
1064
+
git commit -m "feat(oauth): update requireAuth to handle DPoP tokens"
1065
+
```
1066
+
1067
+
---
1068
+
1069
+
## Task 11: Add Revocation Endpoint
1070
+
1071
+
**Files:**
1072
+
- Modify: `src/pds.js`
1073
+
1074
+
**Step 1: Add revoke handler**
1075
+
1076
+
```javascript
1077
+
if (path === '/oauth/revoke' && method === 'POST') {
1078
+
return handleRevoke(request, url, this, env);
1079
+
}
1080
+
1081
+
/**
 * Handle token revocation endpoint (RFC 7009).
 * Revokes access tokens and refresh tokens by client_id. Accepts either a
 * refresh token, a bare token_id (jti), or a full at+jwt access token — in
 * the last case the jti is extracted from the JWT payload so the token_id
 * comparison can actually match.
 * @param {Request} request - The incoming request
 * @param {URL} url - Parsed request URL
 * @param {{ storage: DurableObjectStorage }} pds - The PDS instance
 * @param {{ PDS_PASSWORD: string }} env - Environment variables
 * @returns {Promise<Response>} Empty 200 response on success
 */
async function handleRevoke(request, url, pds, env) {
  const body = await request.text();
  const params = new URLSearchParams(body);
  const token = params.get('token');
  const clientId = params.get('client_id');

  if (!token || !clientId) return json({ error: 'invalid_request' }, 400);

  // Clients typically present the full JWT access token (RFC 7009), whose
  // raw value never equals the stored token_id; pull out the jti instead.
  let tokenId = token;
  const segments = token.split('.');
  if (segments.length === 3) {
    try {
      const payload = JSON.parse(new TextDecoder().decode(base64UrlDecode(segments[1])));
      if (payload.jti) tokenId = payload.jti;
    } catch {
      // Not a parseable JWT; fall through and treat the raw value as an id.
    }
  }

  // Per RFC 7009, revoking an unknown token is still a 200: the DELETE is
  // simply a no-op in that case.
  const sql = createSql(pds.storage);
  await sql`
    DELETE FROM tokens WHERE client_id = ${clientId}
    AND (refresh_token = ${token} OR token_id = ${tokenId})
  `;

  return new Response(null, { status: 200 });
}
1106
+
```
1107
+
1108
+
**Step 2: Commit**
1109
+
1110
+
```bash
1111
+
git add src/pds.js
1112
+
git commit -m "feat(oauth): add token revocation endpoint"
1113
+
```
1114
+
1115
+
---
1116
+
1117
+
## Task 12: Add OAuth E2E Tests
1118
+
1119
+
**Files:**
1120
+
- Modify: `test/e2e.sh`
1121
+
1122
+
**Step 1: Add OAuth tests to e2e.sh**
1123
+
1124
+
Add after the existing tests:
1125
+
1126
+
```bash
1127
+
# OAuth tests
1128
+
echo
1129
+
echo "Testing OAuth endpoints..."
1130
+
1131
+
# Test OAuth Authorization Server Metadata
1132
+
echo "Testing OAuth AS metadata..."
1133
+
AS_METADATA=$(curl -sf "$BASE/.well-known/oauth-authorization-server")
1134
+
echo "$AS_METADATA" | jq -e '.issuer == "'"$BASE"'"' >/dev/null &&
1135
+
pass "AS metadata: issuer matches base URL" || fail "AS metadata: issuer mismatch"
1136
+
echo "$AS_METADATA" | jq -e '.authorization_endpoint == "'"$BASE"'/oauth/authorize"' >/dev/null &&
1137
+
pass "AS metadata: authorization_endpoint" || fail "AS metadata: authorization_endpoint"
1138
+
echo "$AS_METADATA" | jq -e '.token_endpoint == "'"$BASE"'/oauth/token"' >/dev/null &&
1139
+
pass "AS metadata: token_endpoint" || fail "AS metadata: token_endpoint"
1140
+
echo "$AS_METADATA" | jq -e '.pushed_authorization_request_endpoint == "'"$BASE"'/oauth/par"' >/dev/null &&
1141
+
pass "AS metadata: PAR endpoint" || fail "AS metadata: PAR endpoint"
1142
+
echo "$AS_METADATA" | jq -e '.revocation_endpoint == "'"$BASE"'/oauth/revoke"' >/dev/null &&
1143
+
pass "AS metadata: revocation_endpoint" || fail "AS metadata: revocation_endpoint"
1144
+
echo "$AS_METADATA" | jq -e '.jwks_uri == "'"$BASE"'/oauth/jwks"' >/dev/null &&
1145
+
pass "AS metadata: jwks_uri" || fail "AS metadata: jwks_uri"
1146
+
echo "$AS_METADATA" | jq -e '.scopes_supported | contains(["atproto"])' >/dev/null &&
1147
+
pass "AS metadata: scopes_supported includes atproto" || fail "AS metadata: scopes_supported"
1148
+
echo "$AS_METADATA" | jq -e '.response_types_supported | contains(["code"])' >/dev/null &&
1149
+
pass "AS metadata: response_types_supported" || fail "AS metadata: response_types_supported"
1150
+
echo "$AS_METADATA" | jq -e '.grant_types_supported | contains(["authorization_code", "refresh_token"])' >/dev/null &&
1151
+
pass "AS metadata: grant_types_supported" || fail "AS metadata: grant_types_supported"
1152
+
echo "$AS_METADATA" | jq -e '.code_challenge_methods_supported | contains(["S256"])' >/dev/null &&
1153
+
pass "AS metadata: PKCE S256 supported" || fail "AS metadata: PKCE S256"
1154
+
echo "$AS_METADATA" | jq -e '.dpop_signing_alg_values_supported | contains(["ES256"])' >/dev/null &&
1155
+
pass "AS metadata: DPoP ES256 supported" || fail "AS metadata: DPoP ES256"
1156
+
echo "$AS_METADATA" | jq -e '.require_pushed_authorization_requests == false' >/dev/null &&
pass "AS metadata: PAR not required (direct auth supported)" || fail "AS metadata: PAR requirement flag"
1158
+
echo "$AS_METADATA" | jq -e '.authorization_response_iss_parameter_supported == true' >/dev/null &&
1159
+
pass "AS metadata: iss parameter supported" || fail "AS metadata: iss parameter"
1160
+
1161
+
# Test OAuth Protected Resource Metadata
1162
+
echo "Testing OAuth PR metadata..."
1163
+
PR_METADATA=$(curl -sf "$BASE/.well-known/oauth-protected-resource")
1164
+
echo "$PR_METADATA" | jq -e '.resource == "'"$BASE"'"' >/dev/null &&
1165
+
pass "PR metadata: resource matches base URL" || fail "PR metadata: resource mismatch"
1166
+
echo "$PR_METADATA" | jq -e '.authorization_servers | contains(["'"$BASE"'"])' >/dev/null &&
1167
+
pass "PR metadata: authorization_servers" || fail "PR metadata: authorization_servers"
1168
+
echo "$PR_METADATA" | jq -e '.scopes_supported | contains(["atproto"])' >/dev/null &&
1169
+
pass "PR metadata: scopes_supported" || fail "PR metadata: scopes_supported"
1170
+
1171
+
# Test JWKS endpoint
1172
+
echo "Testing JWKS endpoint..."
1173
+
JWKS=$(curl -sf "$BASE/oauth/jwks")
1174
+
echo "$JWKS" | jq -e '.keys | length > 0' >/dev/null &&
1175
+
pass "JWKS: has at least one key" || fail "JWKS: no keys"
1176
+
echo "$JWKS" | jq -e '.keys[0].kty == "EC"' >/dev/null &&
1177
+
pass "JWKS: key is EC type" || fail "JWKS: key type"
1178
+
echo "$JWKS" | jq -e '.keys[0].crv == "P-256"' >/dev/null &&
1179
+
pass "JWKS: key uses P-256 curve" || fail "JWKS: curve"
1180
+
echo "$JWKS" | jq -e '.keys[0].alg == "ES256"' >/dev/null &&
1181
+
pass "JWKS: key algorithm is ES256" || fail "JWKS: algorithm"
1182
+
echo "$JWKS" | jq -e '.keys[0].use == "sig"' >/dev/null &&
1183
+
pass "JWKS: key use is sig" || fail "JWKS: key use"
1184
+
echo "$JWKS" | jq -e '.keys[0].kid == "pds-oauth-key"' >/dev/null &&
1185
+
pass "JWKS: kid is pds-oauth-key" || fail "JWKS: kid"
1186
+
echo "$JWKS" | jq -e '.keys[0] | has("x") and has("y")' >/dev/null &&
1187
+
pass "JWKS: has x and y coordinates" || fail "JWKS: coordinates"
1188
+
echo "$JWKS" | jq -e '.keys[0] | has("d") | not' >/dev/null &&
1189
+
pass "JWKS: does not expose private key (d)" || fail "JWKS: private key exposed!"
1190
+
1191
+
# Test PAR endpoint error cases
1192
+
echo "Testing PAR error handling..."
1193
+
PAR_NO_DPOP=$(curl -s -w "\n%{http_code}" -X POST "$BASE/oauth/par" \
1194
+
-H "Content-Type: application/x-www-form-urlencoded" \
1195
+
-d "client_id=http://localhost:3000&redirect_uri=http://localhost:3000/callback&response_type=code&scope=atproto&code_challenge=test&code_challenge_method=S256")
1196
+
PAR_BODY=$(echo "$PAR_NO_DPOP" | sed '$d')  # sed '$d' is portable; BSD head lacks 'head -n -1'
1197
+
PAR_STATUS=$(echo "$PAR_NO_DPOP" | tail -n 1)
1198
+
[ "$PAR_STATUS" = "400" ] && pass "PAR: rejects missing DPoP (400)" || fail "PAR: should reject missing DPoP"
1199
+
echo "$PAR_BODY" | jq -e '.error == "invalid_dpop_proof"' >/dev/null &&
1200
+
pass "PAR: error code is invalid_dpop_proof" || fail "PAR: wrong error code"
1201
+
1202
+
# Test token endpoint error cases
1203
+
echo "Testing token endpoint error handling..."
1204
+
TOKEN_NO_DPOP=$(curl -s -w "\n%{http_code}" -X POST "$BASE/oauth/token" \
1205
+
-H "Content-Type: application/x-www-form-urlencoded" \
1206
+
-d "grant_type=authorization_code&code=fake&client_id=http://localhost:3000")
1207
+
TOKEN_BODY=$(echo "$TOKEN_NO_DPOP" | sed '$d')  # sed '$d' is portable; BSD head lacks 'head -n -1'
1208
+
TOKEN_STATUS=$(echo "$TOKEN_NO_DPOP" | tail -n 1)
1209
+
[ "$TOKEN_STATUS" = "400" ] && pass "Token: rejects missing DPoP (400)" || fail "Token: should reject missing DPoP"
1210
+
echo "$TOKEN_BODY" | jq -e '.error == "invalid_dpop_proof"' >/dev/null &&
1211
+
pass "Token: error code is invalid_dpop_proof" || fail "Token: wrong error code"
1212
+
1213
+
# Test revoke endpoint (should accept without valid token - RFC 7009 says always 200)
1214
+
echo "Testing revoke endpoint..."
1215
+
REVOKE_STATUS=$(curl -s -o /dev/null -w "%{http_code}" -X POST "$BASE/oauth/revoke" \
1216
+
-H "Content-Type: application/x-www-form-urlencoded" \
1217
+
-d "token=nonexistent&client_id=http://localhost:3000")
1218
+
[ "$REVOKE_STATUS" = "200" ] && pass "Revoke: returns 200 even for invalid token" || fail "Revoke: should always return 200"
1219
+
1220
+
echo
1221
+
echo "All OAuth endpoint tests passed!"
1222
+
```
1223
+
1224
+
**Step 2: Commit**
1225
+
1226
+
```bash
1227
+
git add test/e2e.sh
1228
+
git commit -m "test(oauth): add comprehensive OAuth e2e tests"
1229
+
```
1230
+
1231
+
---
1232
+
1233
+
## Task 13: Run Typecheck and Fix Any Errors
1234
+
1235
+
**Files:**
1236
+
- Modify: `src/pds.js` (if needed)
1237
+
1238
+
**Step 1: Run TypeScript type checking**
1239
+
1240
+
```bash
1241
+
npm run typecheck
1242
+
```
1243
+
1244
+
Expect: No type errors. If there are errors, fix them before continuing.
1245
+
1246
+
**Step 2: Run unit tests**
1247
+
1248
+
```bash
1249
+
npm test
1250
+
```
1251
+
1252
+
Expect: All tests pass.
1253
+
1254
+
**Step 3: Run e2e tests**
1255
+
1256
+
Start wrangler dev in one terminal, then run:
1257
+
1258
+
```bash
1259
+
./test/e2e.sh
1260
+
```
1261
+
1262
+
Expect: All tests pass.
1263
+
1264
+
**Step 4: Final commit (if any fixes were needed)**
1265
+
1266
+
```bash
1267
+
git add src/pds.js
1268
+
git commit -m "fix(oauth): address typecheck errors"
1269
+
```
1270
+
1271
+
---
1272
+
1273
+
## Summary
1274
+
1275
+
This plan implements AT Protocol OAuth with:
1276
+
- PAR (Pushed Authorization Requests)
1277
+
- DPoP (Demonstration of Proof-of-Possession)
1278
+
- PKCE (Proof Key for Code Exchange)
1279
+
- Authorization code flow with consent UI
1280
+
- Token refresh and revocation
1281
+
- Backward compatibility with existing Bearer tokens
1282
+
1283
+
All implemented with zero external dependencies using Web Crypto APIs.
+902
docs/plans/2026-01-07-scope-validation.md
+902
docs/plans/2026-01-07-scope-validation.md
···
1
+
# OAuth Scope Validation Implementation Plan
2
+
3
+
> **For Claude:** REQUIRED SUB-SKILL: Use superpowers:executing-plans to implement this plan task-by-task.
4
+
5
+
**Goal:** Implement granular OAuth scope validation matching the official atproto PDS behavior for repo, blob, and transition scopes.
6
+
7
+
**Architecture:** Add a `ScopePermissions` class that parses scope strings and provides `allowsRepo(collection, action)` and `allowsBlob(mime)` methods. Replace `hasRequiredScope()` calls with permission checks at each write endpoint. Support `atproto` and `transition:generic` as full-access scopes.
8
+
9
+
**Tech Stack:** Pure JavaScript, no dependencies. Node.js test runner for TDD.
10
+
11
+
---
12
+
13
+
## Task 1: Parse Repo Scopes
14
+
15
+
**Files:**
16
+
- Modify: `src/pds.js` (add after `hasRequiredScope` function ~line 4565)
17
+
- Test: `test/pds.test.js` (add new describe block)
18
+
19
+
**Step 1: Write the failing tests**
20
+
21
+
Add to `test/pds.test.js`:
22
+
23
+
```javascript
24
+
import {
25
+
// ... existing imports ...
26
+
parseRepoScope,
27
+
} from '../src/pds.js';
28
+
29
+
describe('Scope Parsing', () => {
30
+
describe('parseRepoScope', () => {
31
+
test('parses wildcard collection with single action', () => {
32
+
const result = parseRepoScope('repo:*:create');
33
+
assert.deepStrictEqual(result, {
34
+
collections: ['*'],
35
+
actions: ['create'],
36
+
});
37
+
});
38
+
39
+
test('parses specific collection with single action', () => {
40
+
const result = parseRepoScope('repo:app.bsky.feed.post:create');
41
+
assert.deepStrictEqual(result, {
42
+
collections: ['app.bsky.feed.post'],
43
+
actions: ['create'],
44
+
});
45
+
});
46
+
47
+
test('parses multiple actions', () => {
48
+
const result = parseRepoScope('repo:*:create,update,delete');
49
+
assert.deepStrictEqual(result, {
50
+
collections: ['*'],
51
+
actions: ['create', 'update', 'delete'],
52
+
});
53
+
});
54
+
55
+
test('returns null for non-repo scope', () => {
56
+
assert.strictEqual(parseRepoScope('atproto'), null);
57
+
assert.strictEqual(parseRepoScope('blob:image/*'), null);
58
+
assert.strictEqual(parseRepoScope('transition:generic'), null);
59
+
});
60
+
61
+
test('returns null for invalid repo scope', () => {
62
+
assert.strictEqual(parseRepoScope('repo:'), null);
63
+
assert.strictEqual(parseRepoScope('repo:foo'), null);
64
+
assert.strictEqual(parseRepoScope('repo::create'), null);
65
+
});
66
+
});
67
+
});
68
+
```
69
+
70
+
**Step 2: Run tests to verify they fail**
71
+
72
+
Run: `npm test`
73
+
Expected: FAIL with "parseRepoScope is not exported"
74
+
75
+
**Step 3: Write minimal implementation**
76
+
77
+
Add to `src/pds.js` after the `hasRequiredScope` function (~line 4565):
78
+
79
+
```javascript
80
+
/**
81
+
* Parse a repo scope string into its components.
82
+
* Format: repo:<collection>:<action>[,<action>...]
83
+
* @param {string} scope - The scope string to parse
84
+
* @returns {{ collections: string[], actions: string[] } | null} Parsed scope or null if invalid
85
+
*/
86
+
function parseRepoScope(scope) {
87
+
if (!scope.startsWith('repo:')) return null;
88
+
89
+
const rest = scope.slice(5); // Remove 'repo:'
90
+
const colonIdx = rest.lastIndexOf(':');
91
+
if (colonIdx === -1 || colonIdx === 0 || colonIdx === rest.length - 1) {
92
+
return null;
93
+
}
94
+
95
+
const collection = rest.slice(0, colonIdx);
96
+
const actionsStr = rest.slice(colonIdx + 1);
97
+
98
+
if (!collection || !actionsStr) return null;
99
+
100
+
const actions = actionsStr.split(',').filter(a => a);
101
+
if (actions.length === 0) return null;
102
+
103
+
return {
104
+
collections: [collection],
105
+
actions,
106
+
};
107
+
}
108
+
```
109
+
110
+
Add `parseRepoScope` to the exports at the end of the file.
111
+
112
+
**Step 4: Run tests to verify they pass**
113
+
114
+
Run: `npm test`
115
+
Expected: PASS
116
+
117
+
**Step 5: Commit**
118
+
119
+
```bash
120
+
git add src/pds.js test/pds.test.js
121
+
git commit -m "feat(scope): add parseRepoScope function"
122
+
```
123
+
124
+
---
125
+
126
+
## Task 2: Parse Blob Scopes with MIME Matching
127
+
128
+
**Files:**
129
+
- Modify: `src/pds.js`
130
+
- Test: `test/pds.test.js`
131
+
132
+
**Step 1: Write the failing tests**
133
+
134
+
Add to test file:
135
+
136
+
```javascript
137
+
import {
138
+
// ... existing imports ...
139
+
parseBlobScope,
140
+
matchesMime,
141
+
} from '../src/pds.js';
142
+
143
+
describe('parseBlobScope', () => {
144
+
test('parses wildcard MIME', () => {
145
+
const result = parseBlobScope('blob:*/*');
146
+
assert.deepStrictEqual(result, { accept: ['*/*'] });
147
+
});
148
+
149
+
test('parses type wildcard', () => {
150
+
const result = parseBlobScope('blob:image/*');
151
+
assert.deepStrictEqual(result, { accept: ['image/*'] });
152
+
});
153
+
154
+
test('parses specific MIME', () => {
155
+
const result = parseBlobScope('blob:image/png');
156
+
assert.deepStrictEqual(result, { accept: ['image/png'] });
157
+
});
158
+
159
+
test('parses multiple MIMEs', () => {
160
+
const result = parseBlobScope('blob:image/png,image/jpeg');
161
+
assert.deepStrictEqual(result, { accept: ['image/png', 'image/jpeg'] });
162
+
});
163
+
164
+
test('returns null for non-blob scope', () => {
165
+
assert.strictEqual(parseBlobScope('atproto'), null);
166
+
assert.strictEqual(parseBlobScope('repo:*:create'), null);
167
+
});
168
+
});
169
+
170
+
describe('matchesMime', () => {
171
+
test('wildcard matches everything', () => {
172
+
assert.strictEqual(matchesMime('*/*', 'image/png'), true);
173
+
assert.strictEqual(matchesMime('*/*', 'video/mp4'), true);
174
+
});
175
+
176
+
test('type wildcard matches same type', () => {
177
+
assert.strictEqual(matchesMime('image/*', 'image/png'), true);
178
+
assert.strictEqual(matchesMime('image/*', 'image/jpeg'), true);
179
+
assert.strictEqual(matchesMime('image/*', 'video/mp4'), false);
180
+
});
181
+
182
+
test('exact match', () => {
183
+
assert.strictEqual(matchesMime('image/png', 'image/png'), true);
184
+
assert.strictEqual(matchesMime('image/png', 'image/jpeg'), false);
185
+
});
186
+
187
+
test('case insensitive', () => {
188
+
assert.strictEqual(matchesMime('image/PNG', 'image/png'), true);
189
+
assert.strictEqual(matchesMime('IMAGE/*', 'image/png'), true);
190
+
});
191
+
});
192
+
```
193
+
194
+
**Step 2: Run tests to verify they fail**
195
+
196
+
Run: `npm test`
197
+
Expected: FAIL
198
+
199
+
**Step 3: Write minimal implementation**
200
+
201
+
```javascript
202
+
/**
203
+
* Parse a blob scope string into its components.
204
+
* Format: blob:<mime>[,<mime>...]
205
+
* @param {string} scope - The scope string to parse
206
+
* @returns {{ accept: string[] } | null} Parsed scope or null if invalid
207
+
*/
208
+
function parseBlobScope(scope) {
209
+
if (!scope.startsWith('blob:')) return null;
210
+
211
+
const mimeStr = scope.slice(5); // Remove 'blob:'
212
+
if (!mimeStr) return null;
213
+
214
+
const accept = mimeStr.split(',').filter(m => m);
215
+
if (accept.length === 0) return null;
216
+
217
+
return { accept };
218
+
}
219
+
220
+
/**
221
+
* Check if a MIME pattern matches an actual MIME type.
222
+
* @param {string} pattern - MIME pattern (e.g., 'image/*', '*/*', 'image/png')
223
+
* @param {string} mime - Actual MIME type to check
224
+
* @returns {boolean} Whether the pattern matches
225
+
*/
226
+
function matchesMime(pattern, mime) {
227
+
const p = pattern.toLowerCase();
228
+
const m = mime.toLowerCase();
229
+
230
+
if (p === '*/*') return true;
231
+
232
+
if (p.endsWith('/*')) {
233
+
const pType = p.slice(0, -2);
234
+
const mType = m.split('/')[0];
235
+
return pType === mType;
236
+
}
237
+
238
+
return p === m;
239
+
}
240
+
```
241
+
242
+
Add exports.
243
+
244
+
**Step 4: Run tests to verify they pass**
245
+
246
+
Run: `npm test`
247
+
Expected: PASS
248
+
249
+
**Step 5: Commit**
250
+
251
+
```bash
252
+
git add src/pds.js test/pds.test.js
253
+
git commit -m "feat(scope): add parseBlobScope and matchesMime functions"
254
+
```
255
+
256
+
---
257
+
258
+
## Task 3: Create ScopePermissions Class
259
+
260
+
**Files:**
261
+
- Modify: `src/pds.js`
262
+
- Test: `test/pds.test.js`
263
+
264
+
**Step 1: Write the failing tests**
265
+
266
+
```javascript
267
+
import {
268
+
// ... existing imports ...
269
+
ScopePermissions,
270
+
} from '../src/pds.js';
271
+
272
+
describe('ScopePermissions', () => {
273
+
describe('static scopes', () => {
274
+
test('atproto grants full access', () => {
275
+
const perms = new ScopePermissions('atproto');
276
+
assert.strictEqual(perms.allowsRepo('app.bsky.feed.post', 'create'), true);
277
+
assert.strictEqual(perms.allowsRepo('any.collection', 'delete'), true);
278
+
assert.strictEqual(perms.allowsBlob('image/png'), true);
279
+
assert.strictEqual(perms.allowsBlob('video/mp4'), true);
280
+
});
281
+
282
+
test('transition:generic grants full repo/blob access', () => {
283
+
const perms = new ScopePermissions('transition:generic');
284
+
assert.strictEqual(perms.allowsRepo('app.bsky.feed.post', 'create'), true);
285
+
assert.strictEqual(perms.allowsRepo('any.collection', 'delete'), true);
286
+
assert.strictEqual(perms.allowsBlob('image/png'), true);
287
+
});
288
+
});
289
+
290
+
describe('repo scopes', () => {
291
+
test('wildcard collection allows any collection', () => {
292
+
const perms = new ScopePermissions('repo:*:create');
293
+
assert.strictEqual(perms.allowsRepo('app.bsky.feed.post', 'create'), true);
294
+
assert.strictEqual(perms.allowsRepo('app.bsky.feed.like', 'create'), true);
295
+
assert.strictEqual(perms.allowsRepo('app.bsky.feed.post', 'delete'), false);
296
+
});
297
+
298
+
test('specific collection restricts to that collection', () => {
299
+
const perms = new ScopePermissions('repo:app.bsky.feed.post:create');
300
+
assert.strictEqual(perms.allowsRepo('app.bsky.feed.post', 'create'), true);
301
+
assert.strictEqual(perms.allowsRepo('app.bsky.feed.like', 'create'), false);
302
+
});
303
+
304
+
test('multiple actions', () => {
305
+
const perms = new ScopePermissions('repo:*:create,update');
306
+
assert.strictEqual(perms.allowsRepo('x', 'create'), true);
307
+
assert.strictEqual(perms.allowsRepo('x', 'update'), true);
308
+
assert.strictEqual(perms.allowsRepo('x', 'delete'), false);
309
+
});
310
+
311
+
test('multiple scopes combine', () => {
312
+
const perms = new ScopePermissions('repo:app.bsky.feed.post:create repo:app.bsky.feed.like:delete');
313
+
assert.strictEqual(perms.allowsRepo('app.bsky.feed.post', 'create'), true);
314
+
assert.strictEqual(perms.allowsRepo('app.bsky.feed.like', 'delete'), true);
315
+
assert.strictEqual(perms.allowsRepo('app.bsky.feed.post', 'delete'), false);
316
+
});
317
+
});
318
+
319
+
describe('blob scopes', () => {
320
+
test('wildcard allows any MIME', () => {
321
+
const perms = new ScopePermissions('blob:*/*');
322
+
assert.strictEqual(perms.allowsBlob('image/png'), true);
323
+
assert.strictEqual(perms.allowsBlob('video/mp4'), true);
324
+
});
325
+
326
+
test('type wildcard restricts to type', () => {
327
+
const perms = new ScopePermissions('blob:image/*');
328
+
assert.strictEqual(perms.allowsBlob('image/png'), true);
329
+
assert.strictEqual(perms.allowsBlob('image/jpeg'), true);
330
+
assert.strictEqual(perms.allowsBlob('video/mp4'), false);
331
+
});
332
+
333
+
test('specific MIME restricts exactly', () => {
334
+
const perms = new ScopePermissions('blob:image/png');
335
+
assert.strictEqual(perms.allowsBlob('image/png'), true);
336
+
assert.strictEqual(perms.allowsBlob('image/jpeg'), false);
337
+
});
338
+
});
339
+
340
+
describe('empty/no scope', () => {
341
+
test('no scope denies everything', () => {
342
+
const perms = new ScopePermissions('');
343
+
assert.strictEqual(perms.allowsRepo('x', 'create'), false);
344
+
assert.strictEqual(perms.allowsBlob('image/png'), false);
345
+
});
346
+
347
+
test('undefined scope denies everything', () => {
348
+
const perms = new ScopePermissions(undefined);
349
+
assert.strictEqual(perms.allowsRepo('x', 'create'), false);
350
+
});
351
+
});
352
+
353
+
describe('assertRepo', () => {
354
+
test('throws ScopeMissingError when denied', () => {
355
+
const perms = new ScopePermissions('repo:app.bsky.feed.post:create');
356
+
assert.throws(
357
+
() => perms.assertRepo('app.bsky.feed.like', 'create'),
358
+
{ message: /Missing required scope/ }
359
+
);
360
+
});
361
+
362
+
test('does not throw when allowed', () => {
363
+
const perms = new ScopePermissions('repo:app.bsky.feed.post:create');
364
+
assert.doesNotThrow(() => perms.assertRepo('app.bsky.feed.post', 'create'));
365
+
});
366
+
});
367
+
368
+
describe('assertBlob', () => {
369
+
test('throws ScopeMissingError when denied', () => {
370
+
const perms = new ScopePermissions('blob:image/*');
371
+
assert.throws(
372
+
() => perms.assertBlob('video/mp4'),
373
+
{ message: /Missing required scope/ }
374
+
);
375
+
});
376
+
377
+
test('does not throw when allowed', () => {
378
+
const perms = new ScopePermissions('blob:image/*');
379
+
assert.doesNotThrow(() => perms.assertBlob('image/png'));
380
+
});
381
+
});
382
+
});
383
+
```
384
+
385
+
**Step 2: Run tests to verify they fail**
386
+
387
+
Run: `npm test`
388
+
Expected: FAIL
389
+
390
+
**Step 3: Write minimal implementation**
391
+
392
+
```javascript
393
+
/**
394
+
* Error thrown when a required scope is missing.
395
+
*/
396
+
class ScopeMissingError extends Error {
397
+
/**
398
+
* @param {string} scope - The missing scope
399
+
*/
400
+
constructor(scope) {
401
+
super(`Missing required scope "${scope}"`);
402
+
this.name = 'ScopeMissingError';
403
+
this.scope = scope;
404
+
this.status = 403;
405
+
}
406
+
}
407
+
408
+
/**
409
+
* Parses and checks OAuth scope permissions.
410
+
*/
411
+
class ScopePermissions {
412
+
/**
413
+
* @param {string | undefined} scopeString - Space-separated scope string
414
+
*/
415
+
constructor(scopeString) {
416
+
/** @type {Set<string>} */
417
+
this.scopes = new Set(scopeString ? scopeString.split(' ').filter(s => s) : []);
418
+
419
+
/** @type {Array<{ collections: string[], actions: string[] }>} */
420
+
this.repoPermissions = [];
421
+
422
+
/** @type {Array<{ accept: string[] }>} */
423
+
this.blobPermissions = [];
424
+
425
+
for (const scope of this.scopes) {
426
+
const repo = parseRepoScope(scope);
427
+
if (repo) this.repoPermissions.push(repo);
428
+
429
+
const blob = parseBlobScope(scope);
430
+
if (blob) this.blobPermissions.push(blob);
431
+
}
432
+
}
433
+
434
+
/**
435
+
* Check if full access is granted (atproto or transition:generic).
436
+
* @returns {boolean}
437
+
*/
438
+
hasFullAccess() {
439
+
return this.scopes.has('atproto') || this.scopes.has('transition:generic');
440
+
}
441
+
442
+
/**
443
+
* Check if a repo operation is allowed.
444
+
* @param {string} collection - The collection NSID
445
+
* @param {string} action - The action (create, update, delete)
446
+
* @returns {boolean}
447
+
*/
448
+
allowsRepo(collection, action) {
449
+
if (this.hasFullAccess()) return true;
450
+
451
+
for (const perm of this.repoPermissions) {
452
+
const collectionMatch = perm.collections.includes('*') || perm.collections.includes(collection);
453
+
const actionMatch = perm.actions.includes(action);
454
+
if (collectionMatch && actionMatch) return true;
455
+
}
456
+
457
+
return false;
458
+
}
459
+
460
+
/**
461
+
* Assert that a repo operation is allowed, throwing if not.
462
+
* @param {string} collection - The collection NSID
463
+
* @param {string} action - The action (create, update, delete)
464
+
* @throws {ScopeMissingError}
465
+
*/
466
+
assertRepo(collection, action) {
467
+
if (!this.allowsRepo(collection, action)) {
468
+
throw new ScopeMissingError(`repo:${collection}:${action}`);
469
+
}
470
+
}
471
+
472
+
/**
473
+
* Check if a blob operation is allowed.
474
+
* @param {string} mime - The MIME type of the blob
475
+
* @returns {boolean}
476
+
*/
477
+
allowsBlob(mime) {
478
+
if (this.hasFullAccess()) return true;
479
+
480
+
for (const perm of this.blobPermissions) {
481
+
for (const pattern of perm.accept) {
482
+
if (matchesMime(pattern, mime)) return true;
483
+
}
484
+
}
485
+
486
+
return false;
487
+
}
488
+
489
+
/**
490
+
* Assert that a blob operation is allowed, throwing if not.
491
+
* @param {string} mime - The MIME type of the blob
492
+
* @throws {ScopeMissingError}
493
+
*/
494
+
assertBlob(mime) {
495
+
if (!this.allowsBlob(mime)) {
496
+
throw new ScopeMissingError(`blob:${mime}`);
497
+
}
498
+
}
499
+
}
500
+
```
501
+
502
+
Add exports.
503
+
504
+
**Step 4: Run tests to verify they pass**
505
+
506
+
Run: `npm test`
507
+
Expected: PASS
508
+
509
+
**Step 5: Commit**
510
+
511
+
```bash
512
+
git add src/pds.js test/pds.test.js
513
+
git commit -m "feat(scope): add ScopePermissions class with repo/blob checking"
514
+
```
515
+
516
+
---
517
+
518
+
## Task 4: Integrate Scope Checking into createRecord
519
+
520
+
**Files:**
521
+
- Modify: `src/pds.js` (handleRepoWrite function and createRecord handler)
522
+
- Test: `test/e2e.test.js` (add scope enforcement tests)
523
+
524
+
**Step 1: Understand the current flow**
525
+
526
+
The `handleRepoWrite` function at line ~4597 currently does:
527
+
```javascript
528
+
if (!hasRequiredScope(auth.scope, 'atproto')) {
529
+
return errorResponse('Forbidden', 'Insufficient scope for repo write', 403);
530
+
}
531
+
```
532
+
533
+
This needs to be replaced with per-endpoint scope checking. The collection is in `body.collection`.
534
+
535
+
**Step 2: Modify handleRepoWrite to accept collection and action**
536
+
537
+
Update `handleRepoWrite` in `src/pds.js`:
538
+
539
+
```javascript
540
+
/**
541
+
* @param {Request} request
542
+
* @param {Env} env
543
+
* @param {string} collection - The collection being written to
544
+
* @param {string} action - The action being performed (create, update, delete)
545
+
*/
546
+
async function handleRepoWrite(request, env, collection, action) {
547
+
const auth = await requireAuth(request, env);
548
+
if ('error' in auth) return auth.error;
549
+
550
+
// Validate scope for repo write using granular permissions
551
+
if (auth.scope !== undefined) {
552
+
const permissions = new ScopePermissions(auth.scope);
553
+
if (!permissions.allowsRepo(collection, action)) {
554
+
return errorResponse(
555
+
'Forbidden',
556
+
`Missing required scope "repo:${collection}:${action}"`,
557
+
403,
558
+
);
559
+
}
560
+
}
561
+
// Legacy tokens without scope are trusted (backward compat)
562
+
563
+
// ... rest of function
564
+
}
565
+
```
566
+
567
+
**Step 3: Update createRecord to pass collection and action**
568
+
569
+
Find the createRecord handler in the routes object and update it to extract collection before calling handleRepoWrite.
570
+
571
+
Since createRecord is POST, the collection comes from the body. We need to restructure slightly:
572
+
573
+
```javascript
574
+
// In the route handler for com.atproto.repo.createRecord
575
+
async (request, env) => {
576
+
const auth = await requireAuth(request, env);
577
+
if ('error' in auth) return auth.error;
578
+
579
+
const body = await request.json();
580
+
const collection = body.collection;
581
+
582
+
if (!collection) {
583
+
return errorResponse('InvalidRequest', 'missing collection param', 400);
584
+
}
585
+
586
+
// Validate scope
587
+
if (auth.scope !== undefined) {
588
+
const permissions = new ScopePermissions(auth.scope);
589
+
if (!permissions.allowsRepo(collection, 'create')) {
590
+
return errorResponse(
591
+
'Forbidden',
592
+
`Missing required scope "repo:${collection}:create"`,
593
+
403,
594
+
);
595
+
}
596
+
}
597
+
598
+
// Continue with existing logic...
599
+
}
600
+
```
601
+
602
+
**Step 4: Write E2E test for scope enforcement**
603
+
604
+
Add to `test/e2e.test.js`:
605
+
606
+
```javascript
607
+
describe('Scope Enforcement', () => {
608
+
test('createRecord denied with insufficient scope', async () => {
609
+
// Create OAuth token with limited scope
610
+
const limitedToken = await getOAuthToken('repo:app.bsky.feed.like:create');
611
+
612
+
const response = await fetch(`${PDS_URL}/xrpc/com.atproto.repo.createRecord`, {
613
+
method: 'POST',
614
+
headers: {
615
+
'Content-Type': 'application/json',
616
+
'Authorization': `DPoP ${limitedToken}`,
617
+
'DPoP': dpopProof,
618
+
},
619
+
body: JSON.stringify({
620
+
repo: TEST_DID,
621
+
collection: 'app.bsky.feed.post', // Not allowed by scope
622
+
record: { text: 'test', createdAt: new Date().toISOString() },
623
+
}),
624
+
});
625
+
626
+
assert.strictEqual(response.status, 403);
627
+
const body = await response.json();
628
+
assert.ok(body.message.includes('Missing required scope'));
629
+
});
630
+
631
+
test('createRecord allowed with matching scope', async () => {
632
+
const validToken = await getOAuthToken('repo:app.bsky.feed.post:create');
633
+
634
+
const response = await fetch(`${PDS_URL}/xrpc/com.atproto.repo.createRecord`, {
635
+
method: 'POST',
636
+
headers: {
637
+
'Content-Type': 'application/json',
638
+
'Authorization': `DPoP ${validToken}`,
639
+
'DPoP': dpopProof,
640
+
},
641
+
body: JSON.stringify({
642
+
repo: TEST_DID,
643
+
collection: 'app.bsky.feed.post',
644
+
record: { text: 'test', createdAt: new Date().toISOString() },
645
+
}),
646
+
});
647
+
648
+
assert.strictEqual(response.status, 200);
649
+
});
650
+
});
651
+
```
652
+
653
+
**Step 5: Run E2E tests**
654
+
655
+
Run: `npm run test:e2e`
656
+
Expected: PASS
657
+
658
+
**Step 6: Commit**
659
+
660
+
```bash
661
+
git add src/pds.js test/e2e.test.js
662
+
git commit -m "feat(scope): enforce granular scopes on createRecord"
663
+
```
664
+
665
+
---
666
+
667
+
## Task 5: Integrate Scope Checking into putRecord
668
+
669
+
**Files:**
670
+
- Modify: `src/pds.js`
671
+
672
+
**Step 1: Update putRecord handler**
673
+
674
+
putRecord requires BOTH create AND update permissions (since it can do either):
675
+
676
+
```javascript
677
+
// In putRecord handler
678
+
if (auth.scope !== undefined) {
679
+
const permissions = new ScopePermissions(auth.scope);
680
+
if (!permissions.allowsRepo(collection, 'create') || !permissions.allowsRepo(collection, 'update')) {
681
+
const missing = !permissions.allowsRepo(collection, 'create') ? 'create' : 'update';
682
+
return errorResponse(
683
+
'Forbidden',
684
+
`Missing required scope "repo:${collection}:${missing}"`,
685
+
403,
686
+
);
687
+
}
688
+
}
689
+
```
690
+
691
+
**Step 2: Run tests**
692
+
693
+
Run: `npm test && npm run test:e2e`
694
+
Expected: PASS
695
+
696
+
**Step 3: Commit**
697
+
698
+
```bash
699
+
git add src/pds.js
700
+
git commit -m "feat(scope): enforce granular scopes on putRecord"
701
+
```
702
+
703
+
---
704
+
705
+
## Task 6: Integrate Scope Checking into deleteRecord
706
+
707
+
**Files:**
708
+
- Modify: `src/pds.js`
709
+
710
+
**Step 1: Update deleteRecord handler**
711
+
712
+
```javascript
713
+
// In deleteRecord handler
714
+
if (auth.scope !== undefined) {
715
+
const permissions = new ScopePermissions(auth.scope);
716
+
if (!permissions.allowsRepo(collection, 'delete')) {
717
+
return errorResponse(
718
+
'Forbidden',
719
+
`Missing required scope "repo:${collection}:delete"`,
720
+
403,
721
+
);
722
+
}
723
+
}
724
+
```
725
+
726
+
**Step 2: Run tests**
727
+
728
+
Run: `npm test && npm run test:e2e`
729
+
Expected: PASS
730
+
731
+
**Step 3: Commit**
732
+
733
+
```bash
734
+
git add src/pds.js
735
+
git commit -m "feat(scope): enforce granular scopes on deleteRecord"
736
+
```
737
+
738
+
---
739
+
740
+
## Task 7: Integrate Scope Checking into applyWrites
741
+
742
+
**Files:**
743
+
- Modify: `src/pds.js`
744
+
745
+
**Step 1: Update applyWrites handler**
746
+
747
+
applyWrites must check each write operation individually:
748
+
749
+
```javascript
750
+
// In applyWrites handler
751
+
if (auth.scope !== undefined) {
752
+
const permissions = new ScopePermissions(auth.scope);
753
+
754
+
for (const write of writes) {
755
+
const collection = write.collection;
756
+
let action;
757
+
758
+
if (write.$type === 'com.atproto.repo.applyWrites#create') {
759
+
action = 'create';
760
+
} else if (write.$type === 'com.atproto.repo.applyWrites#update') {
761
+
action = 'update';
762
+
} else if (write.$type === 'com.atproto.repo.applyWrites#delete') {
763
+
action = 'delete';
764
+
} else {
765
+
continue;
766
+
}
767
+
768
+
if (!permissions.allowsRepo(collection, action)) {
769
+
return errorResponse(
770
+
'Forbidden',
771
+
`Missing required scope "repo:${collection}:${action}"`,
772
+
403,
773
+
);
774
+
}
775
+
}
776
+
}
777
+
```
778
+
779
+
**Step 2: Run tests**
780
+
781
+
Run: `npm test && npm run test:e2e`
782
+
Expected: PASS
783
+
784
+
**Step 3: Commit**
785
+
786
+
```bash
787
+
git add src/pds.js
788
+
git commit -m "feat(scope): enforce granular scopes on applyWrites"
789
+
```
790
+
791
+
---
792
+
793
+
## Task 8: Integrate Scope Checking into uploadBlob
794
+
795
+
**Files:**
796
+
- Modify: `src/pds.js` (handleBlobUpload function)
797
+
798
+
**Step 1: Update handleBlobUpload**
799
+
800
+
The MIME type comes from the Content-Type header:
801
+
802
+
```javascript
803
+
async function handleBlobUpload(request, env) {
804
+
const auth = await requireAuth(request, env);
805
+
if ('error' in auth) return auth.error;
806
+
807
+
const contentType = request.headers.get('content-type') || 'application/octet-stream';
808
+
809
+
// Validate scope for blob upload
810
+
if (auth.scope !== undefined) {
811
+
const permissions = new ScopePermissions(auth.scope);
812
+
if (!permissions.allowsBlob(contentType)) {
813
+
return errorResponse(
814
+
'Forbidden',
815
+
`Missing required scope "blob:${contentType}"`,
816
+
403,
817
+
);
818
+
}
819
+
}
820
+
821
+
// ... rest of function
822
+
}
823
+
```
824
+
825
+
**Step 2: Run tests**
826
+
827
+
Run: `npm test && npm run test:e2e`
828
+
Expected: PASS
829
+
830
+
**Step 3: Commit**
831
+
832
+
```bash
833
+
git add src/pds.js
834
+
git commit -m "feat(scope): enforce granular scopes on uploadBlob with MIME matching"
835
+
```
836
+
837
+
---
838
+
839
+
## Task 9: Remove Old hasRequiredScope Calls
840
+
841
+
**Files:**
842
+
- Modify: `src/pds.js`
843
+
844
+
**Step 1: Search and remove old calls**
845
+
846
+
Find all remaining uses of `hasRequiredScope` and either:
847
+
- Remove them (if replaced by ScopePermissions)
848
+
- Keep for legacy non-OAuth paths if needed
849
+
850
+
**Step 2: Run all tests**
851
+
852
+
Run: `npm test && npm run test:e2e`
853
+
Expected: PASS
854
+
855
+
**Step 3: Commit**
856
+
857
+
```bash
858
+
git add src/pds.js
859
+
git commit -m "refactor(scope): remove deprecated hasRequiredScope function"
860
+
```
861
+
862
+
---
863
+
864
+
## Task 10: Update scope-comparison.md
865
+
866
+
**Files:**
867
+
- Modify: `docs/scope-comparison.md`
868
+
869
+
**Step 1: Update status in comparison doc**
870
+
871
+
Change the pds.js column entries to reflect new implementation:
872
+
873
+
- `atproto`: "Full access"
874
+
- `transition:generic`: "Full access"
875
+
- `repo:<collection>:<action>`: "Full parsing + enforcement"
876
+
- `blob:<mime>`: "Full parsing + enforcement"
877
+
878
+
**Step 2: Commit**
879
+
880
+
```bash
881
+
git add docs/scope-comparison.md
882
+
git commit -m "docs: update scope comparison with implementation status"
883
+
```
884
+
885
+
---
886
+
887
+
## Summary
888
+
889
+
| Task | Description | Est. Time |
890
+
|------|-------------|-----------|
891
+
| 1 | Parse repo scopes | 5 min |
892
+
| 2 | Parse blob scopes + MIME matching | 5 min |
893
+
| 3 | ScopePermissions class | 10 min |
894
+
| 4 | Integrate into createRecord | 10 min |
895
+
| 5 | Integrate into putRecord | 5 min |
896
+
| 6 | Integrate into deleteRecord | 5 min |
897
+
| 7 | Integrate into applyWrites | 10 min |
898
+
| 8 | Integrate into uploadBlob | 5 min |
899
+
| 9 | Remove old hasRequiredScope | 5 min |
900
+
| 10 | Update docs | 5 min |
901
+
902
+
**Total: ~65 minutes**
+563
docs/plans/2026-01-08-consent-permissions-table.md
+563
docs/plans/2026-01-08-consent-permissions-table.md
···
1
+
# Consent Page Permissions Table Implementation Plan
2
+
3
+
> **For Claude:** REQUIRED SUB-SKILL: Use superpowers:executing-plans to implement this plan task-by-task.
4
+
5
+
**Goal:** Display OAuth scopes as a human-readable permissions table on the consent page, matching official atproto PDS behavior.
6
+
7
+
**Architecture:** Update `parseRepoScope()` to handle official query parameter format, add display helpers to parse scopes into a permissions map, render as HTML table with Create/Update/Delete columns. Three display modes: identity-only (no table), granular scopes (table), full access (warning banner).
8
+
9
+
**Tech Stack:** Vanilla JavaScript, HTML/CSS (inline in template string)
10
+
11
+
---
12
+
13
+
### Task 1: Update parseRepoScope to Handle Query Parameters
14
+
15
+
**Files:**
16
+
- Modify: `src/pds.js:4558-4580` (parseRepoScope function)
17
+
- Test: `test/pds.test.js` (parseRepoScope tests)
18
+
19
+
**Step 1: Write failing tests for new format**
20
+
21
+
Add to existing parseRepoScope test block in `test/pds.test.js`:
22
+
23
+
```javascript
24
+
test('parses repo scope with query parameter action', () => {
25
+
const result = parseRepoScope('repo:app.bsky.feed.post?action=create');
26
+
assert.deepStrictEqual(result, {
27
+
collection: 'app.bsky.feed.post',
28
+
actions: ['create'],
29
+
});
30
+
});
31
+
32
+
test('parses repo scope with multiple query parameter actions', () => {
33
+
const result = parseRepoScope('repo:app.bsky.feed.post?action=create&action=update');
34
+
assert.deepStrictEqual(result, {
35
+
collection: 'app.bsky.feed.post',
36
+
actions: ['create', 'update'],
37
+
});
38
+
});
39
+
40
+
test('parses repo scope without actions as all actions', () => {
41
+
const result = parseRepoScope('repo:app.bsky.feed.post');
42
+
assert.deepStrictEqual(result, {
43
+
collection: 'app.bsky.feed.post',
44
+
actions: ['create', 'update', 'delete'],
45
+
});
46
+
});
47
+
48
+
test('parses wildcard collection with action', () => {
49
+
const result = parseRepoScope('repo:*?action=create');
50
+
assert.deepStrictEqual(result, {
51
+
collection: '*',
52
+
actions: ['create'],
53
+
});
54
+
});
55
+
56
+
test('parses query-only format', () => {
57
+
const result = parseRepoScope('repo?collection=app.bsky.feed.post&action=create');
58
+
assert.deepStrictEqual(result, {
59
+
collection: 'app.bsky.feed.post',
60
+
actions: ['create'],
61
+
});
62
+
});
63
+
```
64
+
65
+
**Step 2: Run tests to verify they fail**
66
+
67
+
Run: `npm test 2>&1 | grep -A2 'parses repo scope with query'`
68
+
Expected: FAIL - current parser doesn't handle query params
69
+
70
+
**Step 3: Rewrite parseRepoScope implementation**
71
+
72
+
Replace the existing `parseRepoScope` function in `src/pds.js`:
73
+
74
+
```javascript
75
+
/**
76
+
* Parse a repo scope string into collection and actions.
77
+
* Official format: repo:collection?action=create&action=update
78
+
* Or: repo?collection=foo&action=create
79
+
* Without actions defaults to all: create, update, delete
80
+
* @param {string} scope - The scope string to parse
81
+
* @returns {{ collection: string, actions: string[] } | null} Parsed scope or null if invalid
82
+
*/
83
+
export function parseRepoScope(scope) {
84
+
if (!scope.startsWith('repo:') && !scope.startsWith('repo?')) return null;
85
+
86
+
const ALL_ACTIONS = ['create', 'update', 'delete'];
87
+
let collection;
88
+
let actions;
89
+
90
+
const questionIdx = scope.indexOf('?');
91
+
if (questionIdx === -1) {
92
+
// repo:collection (no query params = all actions)
93
+
collection = scope.slice(5);
94
+
actions = ALL_ACTIONS;
95
+
} else {
96
+
// Parse query parameters
97
+
const queryString = scope.slice(questionIdx + 1);
98
+
const params = new URLSearchParams(queryString);
99
+
const pathPart = scope.startsWith('repo:') ? scope.slice(5, questionIdx) : '';
100
+
101
+
collection = pathPart || params.get('collection');
102
+
actions = params.getAll('action');
103
+
if (actions.length === 0) actions = ALL_ACTIONS;
104
+
}
105
+
106
+
if (!collection) return null;
107
+
108
+
// Validate actions
109
+
const validActions = actions.filter((a) => ALL_ACTIONS.includes(a));
110
+
if (validActions.length === 0) return null;
111
+
112
+
return { collection, actions: validActions };
113
+
}
114
+
```
115
+
116
+
**Step 4: Run tests to verify they pass**
117
+
118
+
Run: `npm test`
119
+
Expected: All parseRepoScope tests pass
120
+
121
+
**Step 5: Remove old format tests that no longer apply**
122
+
123
+
Remove tests for colon-delimited action format (e.g., `repo:collection:create,update`) from test file.
124
+
125
+
**Step 6: Run tests to verify still passing**
126
+
127
+
Run: `npm test`
128
+
Expected: PASS
129
+
130
+
**Step 7: Commit**
131
+
132
+
```bash
133
+
git add src/pds.js test/pds.test.js
134
+
git commit -m "refactor(scope): update parseRepoScope to official query param format"
135
+
```
136
+
137
+
---
138
+
139
+
### Task 2: Update ScopePermissions to Use New Parser
140
+
141
+
**Files:**
142
+
- Modify: `src/pds.js:4700-4710` (assertRepo method)
143
+
- Test: `test/pds.test.js` (ScopePermissions tests)
144
+
145
+
**Step 1: Update ScopePermissions.allowsRepo to handle new format**
146
+
147
+
The `allowsRepo` method should still work since it iterates `repoPermissions` which now have new structure. Verify with test.
148
+
149
+
**Step 2: Write test for new format compatibility**
150
+
151
+
```javascript
152
+
test('allowsRepo with query param format scopes', () => {
153
+
const perms = new ScopePermissions('atproto repo:app.bsky.feed.post?action=create');
154
+
assert.strictEqual(perms.allowsRepo('app.bsky.feed.post', 'create'), true);
155
+
assert.strictEqual(perms.allowsRepo('app.bsky.feed.post', 'delete'), false);
156
+
});
157
+
```
158
+
159
+
**Step 3: Run test**
160
+
161
+
Run: `npm test`
162
+
Expected: PASS (existing logic should work)
163
+
164
+
**Step 4: Update assertRepo error message format**
165
+
166
+
In `assertRepo` method, update the error message to use official format:
167
+
168
+
```javascript
169
+
assertRepo(collection, action) {
170
+
if (!this.allowsRepo(collection, action)) {
171
+
throw new ScopeMissingError(`repo:${collection}?action=${action}`);
172
+
}
173
+
}
174
+
```
175
+
176
+
**Step 5: Run tests**
177
+
178
+
Run: `npm test`
179
+
Expected: PASS
180
+
181
+
**Step 6: Commit**
182
+
183
+
```bash
184
+
git add src/pds.js test/pds.test.js
185
+
git commit -m "refactor(scope): update ScopePermissions for query param format"
186
+
```
187
+
188
+
---
189
+
190
+
### Task 3: Add parseScopesForDisplay Helper
191
+
192
+
**Files:**
193
+
- Modify: `src/pds.js` (add new function near renderConsentPage)
194
+
- Test: `test/pds.test.js`
195
+
196
+
**Step 1: Write failing test**
197
+
198
+
```javascript
199
+
describe('parseScopesForDisplay', () => {
200
+
test('parses identity-only scope', () => {
201
+
const result = parseScopesForDisplay('atproto');
202
+
assert.strictEqual(result.hasAtproto, true);
203
+
assert.strictEqual(result.hasTransitionGeneric, false);
204
+
assert.strictEqual(result.repoPermissions.size, 0);
205
+
assert.deepStrictEqual(result.blobPermissions, []);
206
+
});
207
+
208
+
test('parses granular repo scopes', () => {
209
+
const result = parseScopesForDisplay('atproto repo:app.bsky.feed.post?action=create&action=update');
210
+
assert.strictEqual(result.repoPermissions.size, 1);
211
+
const postPerms = result.repoPermissions.get('app.bsky.feed.post');
212
+
assert.deepStrictEqual(postPerms, { create: true, update: true, delete: false });
213
+
});
214
+
215
+
test('merges multiple scopes for same collection', () => {
216
+
const result = parseScopesForDisplay('atproto repo:app.bsky.feed.post?action=create repo:app.bsky.feed.post?action=delete');
217
+
const postPerms = result.repoPermissions.get('app.bsky.feed.post');
218
+
assert.deepStrictEqual(postPerms, { create: true, update: false, delete: true });
219
+
});
220
+
221
+
test('parses blob scopes', () => {
222
+
const result = parseScopesForDisplay('atproto blob:image/*');
223
+
assert.deepStrictEqual(result.blobPermissions, ['image/*']);
224
+
});
225
+
226
+
test('detects transition:generic', () => {
227
+
const result = parseScopesForDisplay('atproto transition:generic');
228
+
assert.strictEqual(result.hasTransitionGeneric, true);
229
+
});
230
+
});
231
+
```
232
+
233
+
**Step 2: Run tests to verify they fail**
234
+
235
+
Run: `npm test 2>&1 | grep -A2 'parseScopesForDisplay'`
236
+
Expected: FAIL - function doesn't exist
237
+
238
+
**Step 3: Add export to pds.js and implement**
239
+
240
+
```javascript
241
+
/**
242
+
* Parse scope string into display-friendly structure.
243
+
* @param {string} scope - Space-separated scope string
244
+
* @returns {{ hasAtproto: boolean, hasTransitionGeneric: boolean, repoPermissions: Map<string, {create: boolean, update: boolean, delete: boolean}>, blobPermissions: string[] }}
245
+
*/
246
+
export function parseScopesForDisplay(scope) {
247
+
const scopes = scope.split(' ').filter((s) => s);
248
+
249
+
const repoPermissions = new Map();
250
+
251
+
for (const s of scopes) {
252
+
const repo = parseRepoScope(s);
253
+
if (repo) {
254
+
const existing = repoPermissions.get(repo.collection) || {
255
+
create: false,
256
+
update: false,
257
+
delete: false,
258
+
};
259
+
for (const action of repo.actions) {
260
+
existing[action] = true;
261
+
}
262
+
repoPermissions.set(repo.collection, existing);
263
+
}
264
+
}
265
+
266
+
const blobPermissions = [];
267
+
for (const s of scopes) {
268
+
const blob = parseBlobScope(s);
269
+
if (blob) blobPermissions.push(...blob.accept);
270
+
}
271
+
272
+
return {
273
+
hasAtproto: scopes.includes('atproto'),
274
+
hasTransitionGeneric: scopes.includes('transition:generic'),
275
+
repoPermissions,
276
+
blobPermissions,
277
+
};
278
+
}
279
+
```
280
+
281
+
**Step 4: Run tests**
282
+
283
+
Run: `npm test`
284
+
Expected: PASS
285
+
286
+
**Step 5: Commit**
287
+
288
+
```bash
289
+
git add src/pds.js test/pds.test.js
290
+
git commit -m "feat(consent): add parseScopesForDisplay helper"
291
+
```
292
+
293
+
---
294
+
295
+
### Task 4: Add Permission Rendering Helpers
296
+
297
+
**Files:**
298
+
- Modify: `src/pds.js` (add functions near renderConsentPage)
299
+
300
+
**Step 1: Add renderRepoTable helper**
301
+
302
+
```javascript
303
+
/**
304
+
* Render repo permissions as HTML table.
305
+
* @param {Map<string, {create: boolean, update: boolean, delete: boolean}>} repoPermissions
306
+
* @returns {string} HTML string
307
+
*/
308
+
function renderRepoTable(repoPermissions) {
309
+
if (repoPermissions.size === 0) return '';
310
+
311
+
let rows = '';
312
+
for (const [collection, actions] of repoPermissions) {
313
+
const displayCollection = collection === '*' ? '* (any)' : collection;
314
+
rows += `<tr>
315
+
<td>${escapeHtml(displayCollection)}</td>
316
+
<td class="check">${actions.create ? '✓' : ''}</td>
317
+
<td class="check">${actions.update ? '✓' : ''}</td>
318
+
<td class="check">${actions.delete ? '✓' : ''}</td>
319
+
</tr>`;
320
+
}
321
+
322
+
return `<div class="permissions-section">
323
+
<div class="section-label">Repository permissions:</div>
324
+
<table class="permissions-table">
325
+
<thead><tr><th>Collection</th><th>C</th><th>U</th><th>D</th></tr></thead>
326
+
<tbody>${rows}</tbody>
327
+
</table>
328
+
</div>`;
329
+
}
330
+
```
331
+
332
+
**Step 2: Add renderBlobList helper**
333
+
334
+
```javascript
335
+
/**
336
+
* Render blob permissions as HTML list.
337
+
* @param {string[]} blobPermissions
338
+
* @returns {string} HTML string
339
+
*/
340
+
function renderBlobList(blobPermissions) {
341
+
if (blobPermissions.length === 0) return '';
342
+
343
+
const items = blobPermissions
344
+
.map((mime) => `<li>${escapeHtml(mime === '*/*' ? 'All file types' : mime)}</li>`)
345
+
.join('');
346
+
347
+
return `<div class="permissions-section">
348
+
<div class="section-label">Upload permissions:</div>
349
+
<ul class="blob-list">${items}</ul>
350
+
</div>`;
351
+
}
352
+
```
353
+
354
+
**Step 3: Add renderPermissionsHtml helper**
355
+
356
+
```javascript
357
+
/**
358
+
* Render full permissions display based on parsed scopes.
359
+
* @param {{ hasAtproto: boolean, hasTransitionGeneric: boolean, repoPermissions: Map, blobPermissions: string[] }} parsed
360
+
* @returns {string} HTML string
361
+
*/
362
+
function renderPermissionsHtml(parsed) {
363
+
if (parsed.hasTransitionGeneric) {
364
+
return `<div class="warning">⚠️ Full repository access requested<br>
365
+
<small>This app can create, update, and delete any data in your repository.</small></div>`;
366
+
}
367
+
368
+
if (parsed.repoPermissions.size === 0 && parsed.blobPermissions.length === 0) {
369
+
return '';
370
+
}
371
+
372
+
return renderRepoTable(parsed.repoPermissions) + renderBlobList(parsed.blobPermissions);
373
+
}
374
+
```
375
+
376
+
**Step 4: Add escapeHtml helper (if not exists)**
377
+
378
+
Check if `escHtml` exists in renderConsentPage - rename to `escapeHtml` and move outside function for reuse, or create new one:
379
+
380
+
```javascript
381
+
/**
382
+
* Escape HTML special characters.
383
+
* @param {string} s
384
+
* @returns {string}
385
+
*/
386
+
function escapeHtml(s) {
387
+
return s
388
+
.replace(/&/g, '&amp;')
389
+
.replace(/</g, '&lt;')
390
+
.replace(/>/g, '&gt;')
391
+
.replace(/"/g, '&quot;')
392
+
}
393
+
```
394
+
395
+
**Step 5: Run lint/format**
396
+
397
+
Run: `npm run format && npm run lint`
398
+
Expected: PASS
399
+
400
+
**Step 6: Commit**
401
+
402
+
```bash
403
+
git add src/pds.js
404
+
git commit -m "feat(consent): add permission rendering helpers"
405
+
```
406
+
407
+
---
408
+
409
+
### Task 5: Update renderConsentPage
410
+
411
+
**Files:**
412
+
- Modify: `src/pds.js:583-628` (renderConsentPage function)
413
+
414
+
**Step 1: Add new CSS to renderConsentPage**
415
+
416
+
Add to the `<style>` block:
417
+
418
+
```css
419
+
.permissions-section{margin:16px 0}
420
+
.section-label{color:#b0b0b0;font-size:13px;margin-bottom:8px}
421
+
.permissions-table{width:100%;border-collapse:collapse;font-size:13px}
422
+
.permissions-table th{color:#808080;font-weight:normal;text-align:left;padding:4px 8px;border-bottom:1px solid #333}
423
+
.permissions-table th:not(:first-child){text-align:center;width:32px}
424
+
.permissions-table td{padding:4px 8px;border-bottom:1px solid #2a2a2a}
425
+
.permissions-table td:not(:first-child){text-align:center}
426
+
.permissions-table .check{color:#4ade80}
427
+
.blob-list{margin:0;padding-left:20px;color:#e0e0e0;font-size:13px}
428
+
.blob-list li{margin:4px 0}
429
+
.warning{background:#3d2f00;border:1px solid #5c4a00;border-radius:6px;padding:12px;color:#fbbf24;margin:16px 0}
430
+
.warning small{color:#d4a000;display:block;margin-top:4px}
431
+
```
432
+
433
+
**Step 2: Update body content**
434
+
435
+
Replace the scope display line:
436
+
```javascript
437
+
// Old:
438
+
<p>Scope: ${escHtml(scope)}</p>
439
+
440
+
// New:
441
+
const parsed = parseScopesForDisplay(scope);
442
+
const isIdentityOnly = parsed.repoPermissions.size === 0 &&
443
+
parsed.blobPermissions.length === 0 &&
444
+
!parsed.hasTransitionGeneric;
445
+
446
+
// In template:
447
+
<p><b>${escHtml(clientName)}</b> ${isIdentityOnly ?
448
+
'wants to uniquely identify you through your account.' :
449
+
'wants to access your account.'}</p>
450
+
${renderPermissionsHtml(parsed)}
451
+
```
452
+
453
+
**Step 3: Run the app and test manually**
454
+
455
+
Run: `npm run dev`
456
+
Test: Navigate to OAuth flow with different scope combinations
457
+
458
+
**Step 4: Run all tests**
459
+
460
+
Run: `npm test`
461
+
Expected: PASS
462
+
463
+
**Step 5: Run format/lint/check**
464
+
465
+
Run: `npm run format && npm run lint && npm run check`
466
+
Expected: PASS
467
+
468
+
**Step 6: Commit**
469
+
470
+
```bash
471
+
git add src/pds.js
472
+
git commit -m "feat(consent): display scopes as permissions table"
473
+
```
474
+
475
+
---
476
+
477
+
### Task 6: Add E2E Test for Consent Page Display
478
+
479
+
**Files:**
480
+
- Modify: `test/e2e.test.js`
481
+
482
+
**Step 1: Add test for consent page content**
483
+
484
+
```javascript
485
+
it('consent page shows permissions table for granular scopes', async () => {
486
+
// Create PAR request with granular scopes
487
+
const codeVerifier = 'test-verifier-' + randomBytes(16).toString('hex');
488
+
const codeChallenge = createHash('sha256')
489
+
.update(codeVerifier)
490
+
.digest('base64url');
491
+
492
+
const parRes = await fetch(`${BASE}/oauth/par`, {
493
+
method: 'POST',
494
+
headers: { 'Content-Type': 'application/x-www-form-urlencoded' },
495
+
body: new URLSearchParams({
496
+
client_id: `http://localhost?redirect_uri=${encodeURIComponent('http://127.0.0.1:3000/callback')}`,
497
+
redirect_uri: 'http://127.0.0.1:3000/callback',
498
+
response_type: 'code',
499
+
scope: 'atproto repo:app.bsky.feed.post?action=create&action=update blob:image/*',
500
+
code_challenge: codeChallenge,
501
+
code_challenge_method: 'S256',
502
+
state: 'test-state',
503
+
}),
504
+
});
505
+
506
+
const { request_uri } = await parRes.json();
507
+
508
+
// GET the authorize page
509
+
const authorizeRes = await fetch(
510
+
`${BASE}/oauth/authorize?client_id=${encodeURIComponent(`http://localhost?redirect_uri=${encodeURIComponent('http://127.0.0.1:3000/callback')}`)}&request_uri=${encodeURIComponent(request_uri)}`,
511
+
);
512
+
513
+
const html = await authorizeRes.text();
514
+
515
+
// Verify permissions table is rendered
516
+
assert.ok(html.includes('Repository permissions:'), 'Should show repo permissions section');
517
+
assert.ok(html.includes('app.bsky.feed.post'), 'Should show collection name');
518
+
assert.ok(html.includes('Upload permissions:'), 'Should show upload permissions section');
519
+
assert.ok(html.includes('image/*'), 'Should show blob MIME type');
520
+
});
521
+
```
522
+
523
+
**Step 2: Run E2E tests**
524
+
525
+
Run: `npm run test:e2e`
526
+
Expected: PASS
527
+
528
+
**Step 3: Commit**
529
+
530
+
```bash
531
+
git add test/e2e.test.js
532
+
git commit -m "test(consent): add E2E test for permissions table display"
533
+
```
534
+
535
+
---
536
+
537
+
### Task 7: Final Verification and Cleanup
538
+
539
+
**Step 1: Run full test suite**
540
+
541
+
Run: `npm test && npm run test:e2e`
542
+
Expected: All tests pass
543
+
544
+
**Step 2: Run all quality checks**
545
+
546
+
Run: `npm run format && npm run lint && npm run check && npm run typecheck`
547
+
Expected: All pass
548
+
549
+
**Step 3: Manual verification**
550
+
551
+
1. Start dev server: `npm run dev`
552
+
2. Test consent page with various scopes:
553
+
- `atproto` only → should show "uniquely identify you"
554
+
- `atproto repo:app.bsky.feed.post?action=create` → should show table
555
+
- `atproto transition:generic` → should show warning banner
556
+
- `atproto blob:image/*` → should show upload permissions
557
+
558
+
**Step 4: Final commit if any fixes needed**
559
+
560
+
```bash
561
+
git add -A
562
+
git commit -m "chore: final cleanup for consent permissions table"
563
+
```
+480
docs/plans/2026-01-08-foreign-did-proxying.md
+480
docs/plans/2026-01-08-foreign-did-proxying.md
···
1
+
# Foreign DID Proxying Implementation Plan
2
+
3
+
> **For Claude:** REQUIRED SUB-SKILL: Use superpowers:executing-plans to implement this plan task-by-task.
4
+
5
+
**Goal:** Handle foreign DID requests by either (1) respecting `atproto-proxy` header, or (2) detecting foreign `repo` param and proxying to AppView.
6
+
7
+
**Architecture:** (matches official PDS)
8
+
1. Check if `repo` is a local DID → handle locally (ignore atproto-proxy)
9
+
2. If foreign DID with `atproto-proxy` header → proxy to specified service
10
+
3. If foreign DID without header → proxy to AppView (default)
11
+
12
+
**Tech Stack:** Cloudflare Workers, Durable Objects, ATProto
13
+
14
+
---
15
+
16
+
## Background
17
+
18
+
When a client needs data from a foreign DID, it may:
19
+
1. Send `atproto-proxy: did:web:api.bsky.app#bsky_appview` header (explicit)
20
+
2. Just send `repo=did:plc:foreign...` without header (implicit)
21
+
22
+
Our PDS should handle both cases. Currently it ignores the header and always tries to find records locally.
23
+
24
+
---
25
+
26
+
### Task 1: Add parseAtprotoProxyHeader Utility
27
+
28
+
**Files:**
29
+
- Modify: `src/pds.js` (after errorResponse function, around line 178)
30
+
31
+
**Step 1: Add the utility function**
32
+
33
+
```javascript
34
+
/**
35
+
* Parse atproto-proxy header to get service DID and service ID
36
+
* Format: "did:web:api.bsky.app#bsky_appview"
37
+
* @param {string} header
38
+
* @returns {{ did: string, serviceId: string } | null}
39
+
*/
40
+
function parseAtprotoProxyHeader(header) {
41
+
if (!header) return null;
42
+
const hashIndex = header.indexOf('#');
43
+
if (hashIndex === -1 || hashIndex === 0 || hashIndex === header.length - 1) {
44
+
return null;
45
+
}
46
+
return {
47
+
did: header.slice(0, hashIndex),
48
+
serviceId: header.slice(hashIndex + 1),
49
+
};
50
+
}
51
+
```
52
+
53
+
**Step 2: Commit**
54
+
55
+
```bash
56
+
git add src/pds.js
57
+
git commit -m "feat: add parseAtprotoProxyHeader utility"
58
+
```
59
+
60
+
---
61
+
62
+
### Task 2: Add getKnownServiceUrl Utility
63
+
64
+
**Files:**
65
+
- Modify: `src/pds.js` (after parseAtprotoProxyHeader)
66
+
67
+
**Step 1: Add utility to resolve service URLs**
68
+
69
+
```javascript
70
+
/**
71
+
* Get URL for a known service DID
72
+
* @param {string} did - Service DID (e.g., "did:web:api.bsky.app")
73
+
* @param {string} serviceId - Service ID (e.g., "bsky_appview")
74
+
* @returns {string | null}
75
+
*/
76
+
function getKnownServiceUrl(did, serviceId) {
77
+
// Known Bluesky services
78
+
if (did === 'did:web:api.bsky.app' && serviceId === 'bsky_appview') {
79
+
return 'https://api.bsky.app';
80
+
}
81
+
// Add more known services as needed
82
+
return null;
83
+
}
84
+
```
85
+
86
+
**Step 2: Commit**
87
+
88
+
```bash
89
+
git add src/pds.js
90
+
git commit -m "feat: add getKnownServiceUrl utility"
91
+
```
92
+
93
+
---
94
+
95
+
### Task 3: Add proxyToService Utility
96
+
97
+
**Files:**
98
+
- Modify: `src/pds.js` (after getKnownServiceUrl)
99
+
100
+
**Step 1: Add the proxy utility function**
101
+
102
+
```javascript
103
+
/**
104
+
* Proxy a request to a service
105
+
* @param {Request} request - Original request
106
+
* @param {string} serviceUrl - Target service URL (e.g., "https://api.bsky.app")
107
+
* @param {string} [authHeader] - Optional Authorization header
108
+
* @returns {Promise<Response>}
109
+
*/
110
+
async function proxyToService(request, serviceUrl, authHeader) {
111
+
const url = new URL(request.url);
112
+
const targetUrl = new URL(url.pathname + url.search, serviceUrl);
113
+
114
+
const headers = new Headers();
115
+
if (authHeader) {
116
+
headers.set('Authorization', authHeader);
117
+
}
118
+
headers.set(
119
+
'Content-Type',
120
+
request.headers.get('Content-Type') || 'application/json',
121
+
);
122
+
const acceptHeader = request.headers.get('Accept');
123
+
if (acceptHeader) {
124
+
headers.set('Accept', acceptHeader);
125
+
}
126
+
const acceptLangHeader = request.headers.get('Accept-Language');
127
+
if (acceptLangHeader) {
128
+
headers.set('Accept-Language', acceptLangHeader);
129
+
}
130
+
// Forward atproto-specific headers
131
+
const labelersHeader = request.headers.get('atproto-accept-labelers');
132
+
if (labelersHeader) {
133
+
headers.set('atproto-accept-labelers', labelersHeader);
134
+
}
135
+
const topicsHeader = request.headers.get('x-bsky-topics');
136
+
if (topicsHeader) {
137
+
headers.set('x-bsky-topics', topicsHeader);
138
+
}
139
+
140
+
try {
141
+
const response = await fetch(targetUrl.toString(), {
142
+
method: request.method,
143
+
headers,
144
+
body:
145
+
request.method !== 'GET' && request.method !== 'HEAD'
146
+
? request.body
147
+
: undefined,
148
+
});
149
+
const responseHeaders = new Headers(response.headers);
150
+
responseHeaders.set('Access-Control-Allow-Origin', '*');
151
+
return new Response(response.body, {
152
+
status: response.status,
153
+
statusText: response.statusText,
154
+
headers: responseHeaders,
155
+
});
156
+
} catch (err) {
157
+
const message = err instanceof Error ? err.message : String(err);
158
+
return errorResponse('UpstreamFailure', `Failed to reach service: ${message}`, 502);
159
+
}
160
+
}
161
+
```
162
+
163
+
**Step 2: Commit**
164
+
165
+
```bash
166
+
git add src/pds.js
167
+
git commit -m "feat: add proxyToService utility"
168
+
```
169
+
170
+
---
171
+
172
+
### Task 4: Add isLocalDid Helper
173
+
174
+
**Files:**
175
+
- Modify: `src/pds.js` (after proxyToService)
176
+
177
+
**Step 1: Add helper to check if DID is registered locally**
178
+
179
+
```javascript
180
+
/**
181
+
* Check if a DID is registered on this PDS
182
+
* @param {Env} env
183
+
* @param {string} did
184
+
* @returns {Promise<boolean>}
185
+
*/
186
+
async function isLocalDid(env, did) {
187
+
const defaultPds = getDefaultPds(env);
188
+
const res = await defaultPds.fetch(
189
+
new Request('http://internal/get-registered-dids'),
190
+
);
191
+
if (!res.ok) return false;
192
+
const { dids } = await res.json();
193
+
return dids.includes(did);
194
+
}
195
+
```
196
+
197
+
**Step 2: Commit**
198
+
199
+
```bash
200
+
git add src/pds.js
201
+
git commit -m "feat: add isLocalDid helper"
202
+
```
203
+
204
+
---
205
+
206
+
### Task 5: Refactor handleAppViewProxy to Use proxyToService
207
+
208
+
**Files:**
209
+
- Modify: `src/pds.js:2725-2782` (handleAppViewProxy in PersonalDataServer class)
210
+
211
+
**Step 1: Refactor the method**
212
+
213
+
Replace with:
214
+
215
+
```javascript
216
+
/**
217
+
* @param {Request} request
218
+
* @param {string} userDid
219
+
*/
220
+
async handleAppViewProxy(request, userDid) {
221
+
const url = new URL(request.url);
222
+
const lxm = url.pathname.replace('/xrpc/', '');
223
+
const serviceJwt = await this.createServiceAuthForAppView(userDid, lxm);
224
+
return proxyToService(request, 'https://api.bsky.app', `Bearer ${serviceJwt}`);
225
+
}
226
+
```
227
+
228
+
**Step 2: Run existing tests**
229
+
230
+
```bash
231
+
npm test
232
+
```
233
+
234
+
Expected: All tests pass
235
+
236
+
**Step 3: Commit**
237
+
238
+
```bash
239
+
git add src/pds.js
240
+
git commit -m "refactor: simplify handleAppViewProxy using proxyToService"
241
+
```
242
+
243
+
---
244
+
245
+
### Task 6: Handle Foreign Repo with atproto-proxy Support in Worker Routing
246
+
247
+
**Files:**
248
+
- Modify: `src/pds.js` in `handleRequest` function (around line 5199)
249
+
250
+
**Step 1: Update repo endpoints routing to match official PDS behavior**
251
+
252
+
Find the repo endpoints routing block and REPLACE the entire block.
253
+
254
+
Order of operations (matches official PDS):
255
+
1. Check if repo is local → return local data
256
+
2. If foreign → check atproto-proxy header for specific service
257
+
3. If no header → default to AppView
258
+
259
+
```javascript
260
+
// Repo endpoints use ?repo= param instead of ?did=
261
+
if (
262
+
url.pathname === '/xrpc/com.atproto.repo.describeRepo' ||
263
+
url.pathname === '/xrpc/com.atproto.repo.listRecords' ||
264
+
url.pathname === '/xrpc/com.atproto.repo.getRecord'
265
+
) {
266
+
const repo = url.searchParams.get('repo');
267
+
if (!repo) {
268
+
return errorResponse('InvalidRequest', 'missing repo param', 400);
269
+
}
270
+
271
+
// Check if this is a local DID - if so, handle locally
272
+
const isLocal = await isLocalDid(env, repo);
273
+
if (isLocal) {
274
+
const id = env.PDS.idFromName(repo);
275
+
const pds = env.PDS.get(id);
276
+
return pds.fetch(request);
277
+
}
278
+
279
+
// Foreign DID - check for atproto-proxy header
280
+
const proxyHeader = request.headers.get('atproto-proxy');
281
+
if (proxyHeader) {
282
+
const parsed = parseAtprotoProxyHeader(proxyHeader);
283
+
if (parsed) {
284
+
const serviceUrl = getKnownServiceUrl(parsed.did, parsed.serviceId);
285
+
if (serviceUrl) {
286
+
return proxyToService(request, serviceUrl);
287
+
}
288
+
// Unknown service - could add DID resolution here in the future
289
+
return errorResponse('InvalidRequest', `Unknown proxy service: ${proxyHeader}`, 400);
290
+
}
291
+
}
292
+
293
+
// No header - default to AppView
294
+
return proxyToService(request, 'https://api.bsky.app');
295
+
}
296
+
```
297
+
298
+
**Step 2: Run existing tests**
299
+
300
+
```bash
301
+
npm test
302
+
```
303
+
304
+
Expected: All tests pass
305
+
306
+
**Step 3: Commit**
307
+
308
+
```bash
309
+
git add src/pds.js
310
+
git commit -m "feat: handle atproto-proxy header and foreign repo proxying"
311
+
```
312
+
313
+
---
314
+
315
+
### Task 7: Add E2E Tests
316
+
317
+
**Files:**
318
+
- Modify: `test/e2e.test.js`
319
+
320
+
**Step 1: Add tests for proxy functionality**
321
+
322
+
Add a new describe block:
323
+
324
+
```javascript
325
+
describe('Foreign DID proxying', () => {
326
+
it('proxies to AppView when atproto-proxy header present', async () => {
327
+
// Use a known public post from Bluesky (bsky.app official account)
328
+
const res = await fetch(
329
+
`${BASE}/xrpc/com.atproto.repo.getRecord?repo=did:plc:z72i7hdynmk6r22z27h6tvur&collection=app.bsky.feed.post&rkey=3juzlwllznd24`,
330
+
{
331
+
headers: {
332
+
'atproto-proxy': 'did:web:api.bsky.app#bsky_appview',
333
+
},
334
+
},
335
+
);
336
+
// Should get response from AppView, not local 404
337
+
assert.ok(
338
+
res.status === 200 || res.status === 400,
339
+
`Expected 200 or 400 from AppView, got ${res.status}`,
340
+
);
341
+
});
342
+
343
+
it('proxies to AppView for foreign repo without header', async () => {
344
+
// Foreign DID without atproto-proxy header - should still proxy
345
+
const res = await fetch(
346
+
`${BASE}/xrpc/com.atproto.repo.getRecord?repo=did:plc:z72i7hdynmk6r22z27h6tvur&collection=app.bsky.feed.post&rkey=3juzlwllznd24`,
347
+
);
348
+
// Should get response from AppView, not local 404
349
+
assert.ok(
350
+
res.status === 200 || res.status === 400,
351
+
`Expected 200 or 400 from AppView, got ${res.status}`,
352
+
);
353
+
});
354
+
355
+
it('returns error for unknown proxy service', async () => {
356
+
const res = await fetch(
357
+
`${BASE}/xrpc/com.atproto.repo.getRecord?repo=did:plc:test&collection=test&rkey=test`,
358
+
{
359
+
headers: {
360
+
'atproto-proxy': 'did:web:unknown.service#unknown',
361
+
},
362
+
},
363
+
);
364
+
assert.strictEqual(res.status, 400);
365
+
const data = await res.json();
366
+
assert.ok(data.message.includes('Unknown proxy service'));
367
+
});
368
+
369
+
it('returns local record for local DID without proxy header', async () => {
370
+
// Create a record first
371
+
const { data: created } = await jsonPost(
372
+
'/xrpc/com.atproto.repo.createRecord',
373
+
{
374
+
repo: DID,
375
+
collection: 'app.bsky.feed.post',
376
+
record: {
377
+
$type: 'app.bsky.feed.post',
378
+
text: 'Test post for local DID test',
379
+
createdAt: new Date().toISOString(),
380
+
},
381
+
},
382
+
{ Authorization: `Bearer ${token}` },
383
+
);
384
+
385
+
// Fetch without proxy header - should get local record
386
+
const rkey = created.uri.split('/').pop();
387
+
const res = await fetch(
388
+
`${BASE}/xrpc/com.atproto.repo.getRecord?repo=${DID}&collection=app.bsky.feed.post&rkey=${rkey}`,
389
+
);
390
+
assert.strictEqual(res.status, 200);
391
+
const data = await res.json();
392
+
assert.ok(data.value.text.includes('Test post for local DID test'));
393
+
});
394
+
395
+
it('describeRepo proxies for foreign DID', async () => {
396
+
const res = await fetch(
397
+
`${BASE}/xrpc/com.atproto.repo.describeRepo?repo=did:plc:z72i7hdynmk6r22z27h6tvur`,
398
+
);
399
+
// Should get response from AppView
400
+
assert.ok(res.status === 200 || res.status === 400);
401
+
});
402
+
403
+
it('listRecords proxies for foreign DID', async () => {
404
+
const res = await fetch(
405
+
`${BASE}/xrpc/com.atproto.repo.listRecords?repo=did:plc:z72i7hdynmk6r22z27h6tvur&collection=app.bsky.feed.post&limit=1`,
406
+
);
407
+
// Should get response from AppView
408
+
assert.ok(res.status === 200 || res.status === 400);
409
+
});
410
+
});
411
+
```
412
+
413
+
**Step 2: Run the tests**
414
+
415
+
```bash
416
+
npm test
417
+
```
418
+
419
+
Expected: All tests pass
420
+
421
+
**Step 3: Commit**
422
+
423
+
```bash
424
+
git add test/e2e.test.js
425
+
git commit -m "test: add e2e tests for foreign DID proxying"
426
+
```
427
+
428
+
---
429
+
430
+
### Task 8: Manual Verification
431
+
432
+
**Step 1: Deploy to dev**
433
+
434
+
```bash
435
+
npx wrangler deploy
436
+
```
437
+
438
+
**Step 2: Test with the original failing curl (with header)**
439
+
440
+
```bash
441
+
curl 'https://chad-pds.chad-53c.workers.dev/xrpc/com.atproto.repo.getRecord?collection=app.bsky.feed.post&repo=did%3Aplc%3Abcgltzqazw5tb6k2g3ttenbj&rkey=3mbx6iyfqps2c' \
442
+
-H 'atproto-proxy: did:web:api.bsky.app#bsky_appview'
443
+
```
444
+
445
+
Expected: Returns post data from AppView
446
+
447
+
**Step 3: Test without header (foreign repo detection)**
448
+
449
+
```bash
450
+
curl 'https://chad-pds.chad-53c.workers.dev/xrpc/com.atproto.repo.getRecord?collection=app.bsky.feed.post&repo=did%3Aplc%3Abcgltzqazw5tb6k2g3ttenbj&rkey=3mbx6iyfqps2c'
451
+
```
452
+
453
+
Expected: Also returns post data from AppView (detected as foreign DID)
454
+
455
+
**Step 4: Test replying to a post in Bluesky client**
456
+
457
+
Verify the original issue is fixed.
458
+
459
+
---
460
+
461
+
## Future Enhancements
462
+
463
+
1. **Service auth for proxied requests** - Add service JWT when proxying authenticated requests
464
+
2. **DID resolution** - Resolve unknown DIDs to find their service endpoints dynamically
465
+
3. **Caching** - Cache registered DIDs list to avoid repeated lookups
466
+
467
+
---
468
+
469
+
## Summary
470
+
471
+
| Task | Description |
472
+
|------|-------------|
473
+
| 1 | Add `parseAtprotoProxyHeader` utility |
474
+
| 2 | Add `getKnownServiceUrl` utility |
475
+
| 3 | Add `proxyToService` utility |
476
+
| 4 | Add `isLocalDid` helper |
477
+
| 5 | Refactor `handleAppViewProxy` to use shared utility |
478
+
| 6 | Handle `atproto-proxy` header AND foreign `repo` param |
479
+
| 7 | Add e2e tests |
480
+
| 8 | Manual verification |
+255
docs/plans/2026-01-09-consent-profile-card.md
+255
docs/plans/2026-01-09-consent-profile-card.md
···
1
+
# Consent Page Profile Card Implementation Plan
2
+
3
+
> **For Claude:** REQUIRED SUB-SKILL: Use superpowers:executing-plans to implement this plan task-by-task.
4
+
5
+
**Goal:** Show the authorizing user's Bluesky profile (avatar, name, handle) on the OAuth consent page.
6
+
7
+
**Architecture:** Add inline HTML/CSS/JS to the consent page. Profile is fetched client-side from Bluesky's public API using the `login_hint` parameter. Graceful degradation if fetch fails.
8
+
9
+
**Tech Stack:** Vanilla JS, Bluesky public API (`app.bsky.actor.getProfile`)
10
+
11
+
---
12
+
13
+
### Task 1: Update renderConsentPage signature
14
+
15
+
**Files:**
16
+
- Modify: `src/pds.js:5008-5017` (function signature and JSDoc)
17
+
18
+
**Step 1: Add loginHint to JSDoc and parameters**
19
+
20
+
Change the function signature from:
21
+
```javascript
22
+
/**
23
+
* @param {{ clientName: string, clientId: string, scope: string, requestUri: string, error?: string }} params
24
+
* @returns {string} HTML page content
25
+
*/
26
+
function renderConsentPage({
27
+
clientName,
28
+
clientId,
29
+
scope,
30
+
requestUri,
31
+
error = '',
32
+
}) {
33
+
```
34
+
35
+
To:
36
+
```javascript
37
+
/**
38
+
* @param {{ clientName: string, clientId: string, scope: string, requestUri: string, loginHint?: string, error?: string }} params
39
+
* @returns {string} HTML page content
40
+
*/
41
+
function renderConsentPage({
42
+
clientName,
43
+
clientId,
44
+
scope,
45
+
requestUri,
46
+
loginHint = '',
47
+
error = '',
48
+
}) {
49
+
```
50
+
51
+
**Step 2: Verify syntax is correct**
52
+
53
+
Run: `node --check src/pds.js`
54
+
Expected: No output (success)
55
+
56
+
---
57
+
58
+
### Task 2: Add profile card CSS
59
+
60
+
**Files:**
61
+
- Modify: `src/pds.js:5027-5055` (inside the `<style>` block)
62
+
63
+
**Step 1: Add profile card styles after existing styles**
64
+
65
+
Add before `</style></head>`:
66
+
```css
67
+
.profile-card{display:flex;align-items:center;gap:12px;padding:16px;background:#2a2a2a;border-radius:8px;margin-bottom:20px}
68
+
.profile-card.loading .avatar{background:#404040;animation:pulse 1.5s infinite}
69
+
.profile-card .avatar{width:48px;height:48px;border-radius:50%;background:#404040;flex-shrink:0}
70
+
.profile-card .avatar img{width:100%;height:100%;border-radius:50%;object-fit:cover}
71
+
.profile-card .info{min-width:0}
72
+
.profile-card .name{color:#fff;font-weight:500;white-space:nowrap;overflow:hidden;text-overflow:ellipsis}
73
+
.profile-card .handle{color:#808080;font-size:14px}
74
+
@keyframes pulse{0%,100%{opacity:1}50%{opacity:0.5}}
75
+
```
76
+
77
+
**Step 2: Verify syntax is correct**
78
+
79
+
Run: `node --check src/pds.js`
80
+
Expected: No output (success)
81
+
82
+
---
83
+
84
+
### Task 3: Add profile card HTML
85
+
86
+
**Files:**
87
+
- Modify: `src/pds.js:5056-5057` (after `<body>` opening, before `<h2>`)
88
+
89
+
**Step 1: Add profile card HTML conditionally**
90
+
91
+
Replace:
92
+
```javascript
93
+
<body><h2>Sign in to authorize</h2>
94
+
```
95
+
96
+
With:
97
+
```javascript
98
+
<body>
99
+
${loginHint ? `<div class="profile-card loading" id="profile-card">
100
+
<div class="avatar" id="profile-avatar"></div>
101
+
<div class="info"><div class="name" id="profile-name">Loading...</div>
102
+
<div class="handle" id="profile-handle">${escapeHtml(loginHint.startsWith('did:') ? loginHint : '@' + loginHint)}</div></div>
103
+
</div>` : ''}
104
+
<h2>Sign in to authorize</h2>
105
+
```
106
+
107
+
**Step 2: Verify syntax is correct**
108
+
109
+
Run: `node --check src/pds.js`
110
+
Expected: No output (success)
111
+
112
+
---
113
+
114
+
### Task 4: Add profile fetch script
115
+
116
+
**Files:**
117
+
- Modify: `src/pds.js:5066` (before `</body></html>`)
118
+
119
+
**Step 1: Add inline script to fetch profile**
120
+
121
+
Replace:
122
+
```javascript
123
+
</form></body></html>`;
124
+
```
125
+
126
+
With:
127
+
```javascript
128
+
</form>
129
+
${loginHint ? `<script>
130
+
(async()=>{
131
+
const card=document.getElementById('profile-card');
132
+
if(!card)return;
133
+
try{
134
+
const r=await fetch('https://public.api.bsky.app/xrpc/app.bsky.actor.getProfile?actor='+encodeURIComponent('${escapeHtml(loginHint)}'));
135
+
if(!r.ok)throw new Error();
136
+
const p=await r.json();
137
+
document.getElementById('profile-avatar').innerHTML=p.avatar?'<img src="'+p.avatar+'" alt="">':'';
138
+
document.getElementById('profile-name').textContent=p.displayName||p.handle;
139
+
document.getElementById('profile-handle').textContent='@'+p.handle;
140
+
card.classList.remove('loading');
141
+
}catch(e){card.classList.remove('loading')}
142
+
})();
143
+
</script>` : ''}
144
+
</body></html>`;
145
+
```
146
+
147
+
**Step 2: Verify syntax is correct**
148
+
149
+
Run: `node --check src/pds.js`
150
+
Expected: No output (success)
151
+
152
+
---
153
+
154
+
### Task 5: Pass loginHint from PAR flow
155
+
156
+
**Files:**
157
+
- Modify: `src/pds.js:3954-3959` (PAR flow renderConsentPage call)
158
+
159
+
**Step 1: Add loginHint to renderConsentPage call**
160
+
161
+
Change:
162
+
```javascript
163
+
return new Response(
164
+
renderConsentPage({
165
+
clientName: clientMetadata.client_name || clientId,
166
+
clientId: clientId || '',
167
+
scope: parameters.scope || 'atproto',
168
+
requestUri: requestUri || '',
169
+
}),
170
+
```
171
+
172
+
To:
173
+
```javascript
174
+
return new Response(
175
+
renderConsentPage({
176
+
clientName: clientMetadata.client_name || clientId,
177
+
clientId: clientId || '',
178
+
scope: parameters.scope || 'atproto',
179
+
requestUri: requestUri || '',
180
+
loginHint: parameters.login_hint || '',
181
+
}),
182
+
```
183
+
184
+
**Step 2: Verify syntax is correct**
185
+
186
+
Run: `node --check src/pds.js`
187
+
Expected: No output (success)
188
+
189
+
---
190
+
191
+
### Task 6: Pass loginHint from direct flow
192
+
193
+
**Files:**
194
+
- Modify: `src/pds.js:4022-4027` (direct flow renderConsentPage call)
195
+
196
+
**Step 1: Add loginHint to renderConsentPage call**
197
+
198
+
Change:
199
+
```javascript
200
+
return new Response(
201
+
renderConsentPage({
202
+
clientName: clientMetadata.client_name || clientId,
203
+
clientId: clientId,
204
+
scope: scope || 'atproto',
205
+
requestUri: newRequestUri,
206
+
}),
207
+
```
208
+
209
+
To:
210
+
```javascript
211
+
return new Response(
212
+
renderConsentPage({
213
+
clientName: clientMetadata.client_name || clientId,
214
+
clientId: clientId,
215
+
scope: scope || 'atproto',
216
+
requestUri: newRequestUri,
217
+
loginHint: loginHint || '',
218
+
}),
219
+
```
220
+
221
+
**Step 2: Verify syntax is correct**
222
+
223
+
Run: `node --check src/pds.js`
224
+
Expected: No output (success)
225
+
226
+
---
227
+
228
+
### Task 7: Run tests and commit
229
+
230
+
**Step 1: Run full test suite**
231
+
232
+
Run: `npm test`
233
+
Expected: All 126 tests pass
234
+
235
+
**Step 2: Commit changes**
236
+
237
+
```bash
238
+
git add src/pds.js docs/plans/2026-01-09-consent-profile-card.md
239
+
git commit -m "feat: add profile card to OAuth consent page
240
+
241
+
Shows the authorizing user's avatar, display name, and handle
242
+
on the consent page. Fetches from Bluesky public API using
243
+
the login_hint parameter. Degrades gracefully if fetch fails."
244
+
```
245
+
246
+
---
247
+
248
+
## Manual Testing
249
+
250
+
After implementation, test by:
251
+
252
+
1. Start local PDS: `npx wrangler dev`
253
+
2. Trigger OAuth flow with login_hint parameter
254
+
3. Verify profile card shows on consent page
255
+
4. Verify it degrades gracefully with invalid login_hint
+146
docs/scope-comparison.md
+146
docs/scope-comparison.md
···
1
+
# Scope Validation Comparison: pds.js vs atproto PDS
2
+
3
+
Comparison of OAuth scope validation between this implementation and the official AT Protocol PDS.
4
+
5
+
---
6
+
7
+
## Scope Types Supported
8
+
9
+
| Scope Type | Format | pds.js | atproto PDS |
10
+
|------------|--------|--------|-------------|
11
+
| `atproto` | Static | Full access | Required for all OAuth |
12
+
| `transition:generic` | Static | Full access | Full repo/blob bypass |
13
+
| `transition:email` | Static | N/A | Read account email |
14
+
| `transition:chat.bsky` | Static | N/A | Chat RPC access |
15
+
| `repo:<collection>?action=<action>` | Granular | Full parsing + enforcement | Full parsing + enforcement |
16
+
| `blob:<mime>` | Granular | Full parsing + enforcement | Full parsing + enforcement |
17
+
| `rpc:<aud>:<lxm>` | Granular | Not implemented | Full parsing + enforcement |
18
+
19
+
---
20
+
21
+
## Scope Enforcement by Endpoint
22
+
23
+
### com.atproto.repo.createRecord
24
+
25
+
| Aspect | pds.js | atproto PDS |
26
+
|--------|--------|-------------|
27
+
| Scope check | `ScopePermissions.allowsRepo(collection, 'create')` | `permissions.assertRepo({ action: 'create', collection })` |
28
+
| Required scope | `repo:<collection>?action=create` or `transition:generic` or `atproto` | `repo:<collection>?action=create` or `transition:generic` or `atproto` |
29
+
| OAuth-only check | Yes (legacy tokens without scope bypass) | Yes (legacy Bearer bypasses) |
30
+
| Error response | 403 "Missing required scope \"repo:...?action=...\"" | 403 "Missing required scope \"repo:...?action=...\"" |
31
+
32
+
### com.atproto.repo.putRecord
33
+
34
+
| Aspect | pds.js | atproto PDS |
35
+
|--------|--------|-------------|
36
+
| Scope check | `allowsRepo(collection, 'create')` AND `allowsRepo(collection, 'update')` | `assertRepo({ action: 'create' })` AND `assertRepo({ action: 'update' })` |
37
+
| Required scope | `repo:<collection>?action=create&action=update` | `repo:<collection>?action=create&action=update` |
38
+
| Notes | Requires both since putRecord can create or update | Requires both since putRecord can create or update |
39
+
40
+
### com.atproto.repo.deleteRecord
41
+
42
+
| Aspect | pds.js | atproto PDS |
43
+
|--------|--------|-------------|
44
+
| Scope check | `ScopePermissions.allowsRepo(collection, 'delete')` | `permissions.assertRepo({ action: 'delete', collection })` |
45
+
| Required scope | `repo:<collection>?action=delete` | `repo:<collection>?action=delete` |
46
+
47
+
### com.atproto.repo.applyWrites
48
+
49
+
| Aspect | pds.js | atproto PDS |
50
+
|--------|--------|-------------|
51
+
| Scope check | Iterates all writes, checks each unique action/collection pair | Iterates all writes, asserts each unique action/collection pair |
52
+
| Required scope | All `repo:<collection>?action=<action>` for each write | All `repo:<collection>?action=<action>` for each write |
53
+
| Per-write validation | Yes | Yes |
54
+
55
+
### com.atproto.repo.uploadBlob
56
+
57
+
| Aspect | pds.js | atproto PDS |
58
+
|--------|--------|-------------|
59
+
| Scope check | `ScopePermissions.allowsBlob(contentType)` | `permissions.assertBlob({ mime: encoding })` |
60
+
| Required scope | `blob:<mime-type>` (e.g., `blob:image/*`) | `blob:<mime-type>` (e.g., `blob:image/*`) |
61
+
| MIME type awareness | Yes (validates against Content-Type) | Yes (validates against Content-Type) |
62
+
63
+
### app.bsky.actor.getPreferences
64
+
65
+
| Aspect | pds.js | atproto PDS |
66
+
|--------|--------|-------------|
67
+
| Scope check | Requires auth only | `permissions.assertRpc({ aud, lxm })` |
68
+
| Required scope | Any valid auth | `rpc:app.bsky.actor.getPreferences` |
69
+
70
+
### app.bsky.actor.putPreferences
71
+
72
+
| Aspect | pds.js | atproto PDS |
73
+
|--------|--------|-------------|
74
+
| Scope check | Requires auth only | `permissions.assertRpc({ aud, lxm })` |
75
+
| Required scope | Any valid auth | `rpc:app.bsky.actor.putPreferences` |
76
+
77
+
---
78
+
79
+
## Scope Parsing
80
+
81
+
| Feature | pds.js | atproto PDS |
82
+
|---------|--------|-------------|
83
+
| Scope string splitting | `scope.split(' ')` | `ScopesSet` class |
84
+
| Repo scope parsing | `parseRepoScope()` | `RepoPermission.fromString()` |
85
+
| Repo scope format | `repo:collection?action=create&action=update` | `repo:collection?action=create&action=update` |
86
+
| Blob scope parsing | `parseBlobScope()` | `BlobPermission.fromString()` |
87
+
| RPC scope parsing | None | `RpcPermission.fromString()` |
88
+
| Scope validation | Returns null for invalid | Validates syntax, ignores invalid |
89
+
| Action deduplication | Yes (via Set) | Yes |
90
+
| Default actions | All (create, update, delete) when no `?action=` | All (create, update, delete) when no `?action=` |
91
+
92
+
---
93
+
94
+
## Permission Checking
95
+
96
+
| Feature | pds.js | atproto PDS |
97
+
|---------|--------|-------------|
98
+
| Permission class | `ScopePermissions` | `ScopePermissions` / `ScopePermissionsTransition` |
99
+
| `allowsRepo(collection, action)` | Yes | Yes |
100
+
| `allowsBlob(mime)` | Yes (with MIME wildcard matching) | Yes (with MIME wildcard matching) |
101
+
| `allowsRpc(aud, lxm)` | N/A | Yes |
102
+
| Transition scope handling | `transition:generic` bypasses repo/blob checks | `transition:generic` bypasses repo/blob checks |
103
+
| Error messages | Specific missing scope in error | Specific missing scope in error |
104
+
105
+
---
106
+
107
+
## OAuth Flow
108
+
109
+
| Feature | pds.js | atproto PDS |
110
+
|---------|--------|-------------|
111
+
| `scopes_supported` in metadata | `['atproto']` | `['atproto']` (but accepts granular) |
112
+
| Scope validation at PAR | None | Validates syntax |
113
+
| Scope stored in token | Yes | Yes |
114
+
| Scope returned in token response | Yes | Yes |
115
+
| `atproto` scope required | Checked at endpoints | Required at token verification |
116
+
117
+
---
118
+
119
+
## Transition Scope Behavior
120
+
121
+
| Scope | pds.js | atproto PDS |
122
+
|-------|--------|-------------|
123
+
| `transition:generic` | Bypasses all repo/blob permission checks | Bypasses ALL repo/blob permission checks |
124
+
| `transition:chat.bsky` | Not implemented | Allows `chat.bsky.*` RPC methods |
125
+
| `transition:email` | Not implemented | Allows `account:email:read` |
126
+
127
+
---
128
+
129
+
## Summary
130
+
131
+
| Category | pds.js | atproto PDS |
132
+
|----------|--------|-------------|
133
+
| Scope parsing | Full parser for repo/blob | Full parser per scope type |
134
+
| Enforcement granularity | Per-collection, per-action | Per-collection, per-action |
135
+
| Transition scope support | `transition:generic` only | Full |
136
+
| MIME-aware blob scopes | Yes | Yes |
137
+
| RPC scopes | No | Yes |
138
+
| Error specificity | Names missing scope | Names missing scope |
139
+
140
+
---
141
+
142
+
## Remaining Gaps
143
+
144
+
1. **RPC scopes** — `rpc:<aud>:<lxm>` parsing and enforcement not implemented
145
+
2. **Additional transition scopes** — `transition:chat.bsky` and `transition:email` not implemented
146
+
3. **Scope validation at PAR** — Could validate scope syntax during authorization request
-1
node_modules/.mf/cf.json
-1
node_modules/.mf/cf.json
···
1
-
{"httpProtocol":"HTTP/1.1","clientAcceptEncoding":"gzip, deflate, br","requestPriority":"","edgeRequestKeepAliveStatus":1,"requestHeaderNames":{},"clientTcpRtt":24,"colo":"SEA","asn":21928,"asOrganization":"T-Mobile USA, Inc.","country":"US","isEUCountry":false,"city":"Seattle","continent":"NA","region":"Washington","regionCode":"WA","timezone":"America/Los_Angeles","longitude":"-122.33207","latitude":"47.60621","postalCode":"98101","metroCode":"819","tlsVersion":"TLSv1.3","tlsCipher":"AEAD-AES256-GCM-SHA384","tlsClientRandom":"FW+sQRqZqHZpEelfAQTRk+5SBBOT8v5GshfKErxQz6Q=","tlsClientCiphersSha1":"JZtiTn8H/ntxORk+XXvU2EvNoz8=","tlsClientExtensionsSha1":"Y7DIC8A6G0/aXviZ8ie/xDbJb7g=","tlsClientExtensionsSha1Le":"6e+q3vPm88rSgMTN/h7WTTxQ2wQ=","tlsExportedAuthenticator":{"clientHandshake":"20d58b760e5aaaf0ace097da0c99e60c48fc1e2addc3a2d0a7e7a3679c83dbc0a209155c93548bea83c23b1cd3c98a9b","serverHandshake":"08a21ccd27344814299207fc42fe6904313b6d70e2b50fa834349973f26ac95957ed88e3b3a11c2568ac6157d467756c","clientFinished":"4b9eb668c3214b8b4c46bc8aa5b7fdf101837cc2e8a6ec6f717054443278647fe66b2d6f18209edd126164270ddbea3d","serverFinished":"e9bd44299ea83b3de051766258dc5d38739547de52e4a01e021a75942b7b6c771ab94832b7ea2d4a10ef5b046ded06f0"},"tlsClientHelloLength":"386","tlsClientAuth":{"certPresented":"0","certVerified":"NONE","certRevoked":"0","certIssuerDN":"","certSubjectDN":"","certIssuerDNRFC2253":"","certSubjectDNRFC2253":"","certIssuerDNLegacy":"","certSubjectDNLegacy":"","certSerial":"","certIssuerSerial":"","certSKI":"","certIssuerSKI":"","certFingerprintSHA1":"","certFingerprintSHA256":"","certNotBefore":"","certNotAfter":""},"verifiedBotCategory":"","botManagement":{"corporateProxy":false,"verifiedBot":false,"jsDetection":{"passed":false},"staticResource":false,"detectionIds":{},"score":99}}
+1715
-3
package-lock.json
+1715
-3
package-lock.json
···
1
1
{
2
-
"name": "cloudflare-pds",
2
+
"name": "pds.js",
3
3
"version": "0.1.0",
4
4
"lockfileVersion": 3,
5
5
"requires": true,
6
6
"packages": {
7
7
"": {
8
-
"name": "cloudflare-pds",
9
-
"version": "0.1.0"
8
+
"name": "pds.js",
9
+
"version": "0.1.0",
10
+
"devDependencies": {
11
+
"@biomejs/biome": "^2.3.11",
12
+
"@cloudflare/workers-types": "^4.20260103.0",
13
+
"typescript": "^5.9.3",
14
+
"wrangler": "^4.54.0"
15
+
}
16
+
},
17
+
"node_modules/@biomejs/biome": {
18
+
"version": "2.3.11",
19
+
"resolved": "https://registry.npmjs.org/@biomejs/biome/-/biome-2.3.11.tgz",
20
+
"integrity": "sha512-/zt+6qazBWguPG6+eWmiELqO+9jRsMZ/DBU3lfuU2ngtIQYzymocHhKiZRyrbra4aCOoyTg/BmY+6WH5mv9xmQ==",
21
+
"dev": true,
22
+
"license": "MIT OR Apache-2.0",
23
+
"bin": {
24
+
"biome": "bin/biome"
25
+
},
26
+
"engines": {
27
+
"node": ">=14.21.3"
28
+
},
29
+
"funding": {
30
+
"type": "opencollective",
31
+
"url": "https://opencollective.com/biome"
32
+
},
33
+
"optionalDependencies": {
34
+
"@biomejs/cli-darwin-arm64": "2.3.11",
35
+
"@biomejs/cli-darwin-x64": "2.3.11",
36
+
"@biomejs/cli-linux-arm64": "2.3.11",
37
+
"@biomejs/cli-linux-arm64-musl": "2.3.11",
38
+
"@biomejs/cli-linux-x64": "2.3.11",
39
+
"@biomejs/cli-linux-x64-musl": "2.3.11",
40
+
"@biomejs/cli-win32-arm64": "2.3.11",
41
+
"@biomejs/cli-win32-x64": "2.3.11"
42
+
}
43
+
},
44
+
"node_modules/@biomejs/cli-darwin-arm64": {
45
+
"version": "2.3.11",
46
+
"resolved": "https://registry.npmjs.org/@biomejs/cli-darwin-arm64/-/cli-darwin-arm64-2.3.11.tgz",
47
+
"integrity": "sha512-/uXXkBcPKVQY7rc9Ys2CrlirBJYbpESEDme7RKiBD6MmqR2w3j0+ZZXRIL2xiaNPsIMMNhP1YnA+jRRxoOAFrA==",
48
+
"cpu": [
49
+
"arm64"
50
+
],
51
+
"dev": true,
52
+
"license": "MIT OR Apache-2.0",
53
+
"optional": true,
54
+
"os": [
55
+
"darwin"
56
+
],
57
+
"engines": {
58
+
"node": ">=14.21.3"
59
+
}
60
+
},
61
+
"node_modules/@biomejs/cli-darwin-x64": {
62
+
"version": "2.3.11",
63
+
"resolved": "https://registry.npmjs.org/@biomejs/cli-darwin-x64/-/cli-darwin-x64-2.3.11.tgz",
64
+
"integrity": "sha512-fh7nnvbweDPm2xEmFjfmq7zSUiox88plgdHF9OIW4i99WnXrAC3o2P3ag9judoUMv8FCSUnlwJCM1B64nO5Fbg==",
65
+
"cpu": [
66
+
"x64"
67
+
],
68
+
"dev": true,
69
+
"license": "MIT OR Apache-2.0",
70
+
"optional": true,
71
+
"os": [
72
+
"darwin"
73
+
],
74
+
"engines": {
75
+
"node": ">=14.21.3"
76
+
}
77
+
},
78
+
"node_modules/@biomejs/cli-linux-arm64": {
79
+
"version": "2.3.11",
80
+
"resolved": "https://registry.npmjs.org/@biomejs/cli-linux-arm64/-/cli-linux-arm64-2.3.11.tgz",
81
+
"integrity": "sha512-l4xkGa9E7Uc0/05qU2lMYfN1H+fzzkHgaJoy98wO+b/7Gl78srbCRRgwYSW+BTLixTBrM6Ede5NSBwt7rd/i6g==",
82
+
"cpu": [
83
+
"arm64"
84
+
],
85
+
"dev": true,
86
+
"license": "MIT OR Apache-2.0",
87
+
"optional": true,
88
+
"os": [
89
+
"linux"
90
+
],
91
+
"engines": {
92
+
"node": ">=14.21.3"
93
+
}
94
+
},
95
+
"node_modules/@biomejs/cli-linux-arm64-musl": {
96
+
"version": "2.3.11",
97
+
"resolved": "https://registry.npmjs.org/@biomejs/cli-linux-arm64-musl/-/cli-linux-arm64-musl-2.3.11.tgz",
98
+
"integrity": "sha512-XPSQ+XIPZMLaZ6zveQdwNjbX+QdROEd1zPgMwD47zvHV+tCGB88VH+aynyGxAHdzL+Tm/+DtKST5SECs4iwCLg==",
99
+
"cpu": [
100
+
"arm64"
101
+
],
102
+
"dev": true,
103
+
"license": "MIT OR Apache-2.0",
104
+
"optional": true,
105
+
"os": [
106
+
"linux"
107
+
],
108
+
"engines": {
109
+
"node": ">=14.21.3"
110
+
}
111
+
},
112
+
"node_modules/@biomejs/cli-linux-x64": {
113
+
"version": "2.3.11",
114
+
"resolved": "https://registry.npmjs.org/@biomejs/cli-linux-x64/-/cli-linux-x64-2.3.11.tgz",
115
+
"integrity": "sha512-/1s9V/H3cSe0r0Mv/Z8JryF5x9ywRxywomqZVLHAoa/uN0eY7F8gEngWKNS5vbbN/BsfpCG5yeBT5ENh50Frxg==",
116
+
"cpu": [
117
+
"x64"
118
+
],
119
+
"dev": true,
120
+
"license": "MIT OR Apache-2.0",
121
+
"optional": true,
122
+
"os": [
123
+
"linux"
124
+
],
125
+
"engines": {
126
+
"node": ">=14.21.3"
127
+
}
128
+
},
129
+
"node_modules/@biomejs/cli-linux-x64-musl": {
130
+
"version": "2.3.11",
131
+
"resolved": "https://registry.npmjs.org/@biomejs/cli-linux-x64-musl/-/cli-linux-x64-musl-2.3.11.tgz",
132
+
"integrity": "sha512-vU7a8wLs5C9yJ4CB8a44r12aXYb8yYgBn+WeyzbMjaCMklzCv1oXr8x+VEyWodgJt9bDmhiaW/I0RHbn7rsNmw==",
133
+
"cpu": [
134
+
"x64"
135
+
],
136
+
"dev": true,
137
+
"license": "MIT OR Apache-2.0",
138
+
"optional": true,
139
+
"os": [
140
+
"linux"
141
+
],
142
+
"engines": {
143
+
"node": ">=14.21.3"
144
+
}
145
+
},
146
+
"node_modules/@biomejs/cli-win32-arm64": {
147
+
"version": "2.3.11",
148
+
"resolved": "https://registry.npmjs.org/@biomejs/cli-win32-arm64/-/cli-win32-arm64-2.3.11.tgz",
149
+
"integrity": "sha512-PZQ6ElCOnkYapSsysiTy0+fYX+agXPlWugh6+eQ6uPKI3vKAqNp6TnMhoM3oY2NltSB89hz59o8xIfOdyhi9Iw==",
150
+
"cpu": [
151
+
"arm64"
152
+
],
153
+
"dev": true,
154
+
"license": "MIT OR Apache-2.0",
155
+
"optional": true,
156
+
"os": [
157
+
"win32"
158
+
],
159
+
"engines": {
160
+
"node": ">=14.21.3"
161
+
}
162
+
},
163
+
"node_modules/@biomejs/cli-win32-x64": {
164
+
"version": "2.3.11",
165
+
"resolved": "https://registry.npmjs.org/@biomejs/cli-win32-x64/-/cli-win32-x64-2.3.11.tgz",
166
+
"integrity": "sha512-43VrG813EW+b5+YbDbz31uUsheX+qFKCpXeY9kfdAx+ww3naKxeVkTD9zLIWxUPfJquANMHrmW3wbe/037G0Qg==",
167
+
"cpu": [
168
+
"x64"
169
+
],
170
+
"dev": true,
171
+
"license": "MIT OR Apache-2.0",
172
+
"optional": true,
173
+
"os": [
174
+
"win32"
175
+
],
176
+
"engines": {
177
+
"node": ">=14.21.3"
178
+
}
179
+
},
180
+
"node_modules/@cloudflare/kv-asset-handler": {
181
+
"version": "0.4.1",
182
+
"resolved": "https://registry.npmjs.org/@cloudflare/kv-asset-handler/-/kv-asset-handler-0.4.1.tgz",
183
+
"integrity": "sha512-Nu8ahitGFFJztxUml9oD/DLb7Z28C8cd8F46IVQ7y5Btz575pvMY8AqZsXkX7Gds29eCKdMgIHjIvzskHgPSFg==",
184
+
"dev": true,
185
+
"license": "MIT OR Apache-2.0",
186
+
"dependencies": {
187
+
"mime": "^3.0.0"
188
+
},
189
+
"engines": {
190
+
"node": ">=18.0.0"
191
+
}
192
+
},
193
+
"node_modules/@cloudflare/unenv-preset": {
194
+
"version": "2.7.13",
195
+
"resolved": "https://registry.npmjs.org/@cloudflare/unenv-preset/-/unenv-preset-2.7.13.tgz",
196
+
"integrity": "sha512-NulO1H8R/DzsJguLC0ndMuk4Ufv0KSlN+E54ay9rn9ZCQo0kpAPwwh3LhgpZ96a3Dr6L9LqW57M4CqC34iLOvw==",
197
+
"dev": true,
198
+
"license": "MIT OR Apache-2.0",
199
+
"peerDependencies": {
200
+
"unenv": "2.0.0-rc.24",
201
+
"workerd": "^1.20251202.0"
202
+
},
203
+
"peerDependenciesMeta": {
204
+
"workerd": {
205
+
"optional": true
206
+
}
207
+
}
208
+
},
209
+
"node_modules/@cloudflare/workerd-darwin-64": {
210
+
"version": "1.20251210.0",
211
+
"resolved": "https://registry.npmjs.org/@cloudflare/workerd-darwin-64/-/workerd-darwin-64-1.20251210.0.tgz",
212
+
"integrity": "sha512-Nn9X1moUDERA9xtFdCQ2XpQXgAS9pOjiCxvOT8sVx9UJLAiBLkfSCGbpsYdarODGybXCpjRlc77Yppuolvt7oQ==",
213
+
"cpu": [
214
+
"x64"
215
+
],
216
+
"dev": true,
217
+
"license": "Apache-2.0",
218
+
"optional": true,
219
+
"os": [
220
+
"darwin"
221
+
],
222
+
"engines": {
223
+
"node": ">=16"
224
+
}
225
+
},
226
+
"node_modules/@cloudflare/workerd-darwin-arm64": {
227
+
"version": "1.20251210.0",
228
+
"resolved": "https://registry.npmjs.org/@cloudflare/workerd-darwin-arm64/-/workerd-darwin-arm64-1.20251210.0.tgz",
229
+
"integrity": "sha512-Mg8iYIZQFnbevq/ls9eW/eneWTk/EE13Pej1MwfkY5et0jVpdHnvOLywy/o+QtMJFef1AjsqXGULwAneYyBfHw==",
230
+
"cpu": [
231
+
"arm64"
232
+
],
233
+
"dev": true,
234
+
"license": "Apache-2.0",
235
+
"optional": true,
236
+
"os": [
237
+
"darwin"
238
+
],
239
+
"engines": {
240
+
"node": ">=16"
241
+
}
242
+
},
243
+
"node_modules/@cloudflare/workerd-linux-64": {
244
+
"version": "1.20251210.0",
245
+
"resolved": "https://registry.npmjs.org/@cloudflare/workerd-linux-64/-/workerd-linux-64-1.20251210.0.tgz",
246
+
"integrity": "sha512-kjC2fCZhZ2Gkm1biwk2qByAYpGguK5Gf5ic8owzSCUw0FOUfQxTZUT9Lp3gApxsfTLbbnLBrX/xzWjywH9QR4g==",
247
+
"cpu": [
248
+
"x64"
249
+
],
250
+
"dev": true,
251
+
"license": "Apache-2.0",
252
+
"optional": true,
253
+
"os": [
254
+
"linux"
255
+
],
256
+
"engines": {
257
+
"node": ">=16"
258
+
}
259
+
},
260
+
"node_modules/@cloudflare/workerd-linux-arm64": {
261
+
"version": "1.20251210.0",
262
+
"resolved": "https://registry.npmjs.org/@cloudflare/workerd-linux-arm64/-/workerd-linux-arm64-1.20251210.0.tgz",
263
+
"integrity": "sha512-2IB37nXi7PZVQLa1OCuO7/6pNxqisRSO8DmCQ5x/3sezI5op1vwOxAcb1osAnuVsVN9bbvpw70HJvhKruFJTuA==",
264
+
"cpu": [
265
+
"arm64"
266
+
],
267
+
"dev": true,
268
+
"license": "Apache-2.0",
269
+
"optional": true,
270
+
"os": [
271
+
"linux"
272
+
],
273
+
"engines": {
274
+
"node": ">=16"
275
+
}
276
+
},
277
+
"node_modules/@cloudflare/workerd-windows-64": {
278
+
"version": "1.20251210.0",
279
+
"resolved": "https://registry.npmjs.org/@cloudflare/workerd-windows-64/-/workerd-windows-64-1.20251210.0.tgz",
280
+
"integrity": "sha512-Uaz6/9XE+D6E7pCY4OvkCuJHu7HcSDzeGcCGY1HLhojXhHd7yL52c3yfiyJdS8hPatiAa0nn5qSI/42+aTdDSw==",
281
+
"cpu": [
282
+
"x64"
283
+
],
284
+
"dev": true,
285
+
"license": "Apache-2.0",
286
+
"optional": true,
287
+
"os": [
288
+
"win32"
289
+
],
290
+
"engines": {
291
+
"node": ">=16"
292
+
}
293
+
},
294
+
"node_modules/@cloudflare/workers-types": {
295
+
"version": "4.20260103.0",
296
+
"resolved": "https://registry.npmjs.org/@cloudflare/workers-types/-/workers-types-4.20260103.0.tgz",
297
+
"integrity": "sha512-jANmoGpJcXARnwlkvrQOeWyjYD1quTfHcs+++Z544XRHOSfLc4XSlts7snIhbiIGgA5bo66zDhraF+9lKUr2hw==",
298
+
"dev": true,
299
+
"license": "MIT OR Apache-2.0"
300
+
},
301
+
"node_modules/@cspotcode/source-map-support": {
302
+
"version": "0.8.1",
303
+
"resolved": "https://registry.npmjs.org/@cspotcode/source-map-support/-/source-map-support-0.8.1.tgz",
304
+
"integrity": "sha512-IchNf6dN4tHoMFIn/7OE8LWZ19Y6q/67Bmf6vnGREv8RSbBVb9LPJxEcnwrcwX6ixSvaiGoomAUvu4YSxXrVgw==",
305
+
"dev": true,
306
+
"license": "MIT",
307
+
"dependencies": {
308
+
"@jridgewell/trace-mapping": "0.3.9"
309
+
},
310
+
"engines": {
311
+
"node": ">=12"
312
+
}
313
+
},
314
+
"node_modules/@emnapi/runtime": {
315
+
"version": "1.8.1",
316
+
"resolved": "https://registry.npmjs.org/@emnapi/runtime/-/runtime-1.8.1.tgz",
317
+
"integrity": "sha512-mehfKSMWjjNol8659Z8KxEMrdSJDDot5SXMq00dM8BN4o+CLNXQ0xH2V7EchNHV4RmbZLmmPdEaXZc5H2FXmDg==",
318
+
"dev": true,
319
+
"license": "MIT",
320
+
"optional": true,
321
+
"dependencies": {
322
+
"tslib": "^2.4.0"
323
+
}
324
+
},
325
+
"node_modules/@esbuild/aix-ppc64": {
326
+
"version": "0.27.0",
327
+
"resolved": "https://registry.npmjs.org/@esbuild/aix-ppc64/-/aix-ppc64-0.27.0.tgz",
328
+
"integrity": "sha512-KuZrd2hRjz01y5JK9mEBSD3Vj3mbCvemhT466rSuJYeE/hjuBrHfjjcjMdTm/sz7au+++sdbJZJmuBwQLuw68A==",
329
+
"cpu": [
330
+
"ppc64"
331
+
],
332
+
"dev": true,
333
+
"license": "MIT",
334
+
"optional": true,
335
+
"os": [
336
+
"aix"
337
+
],
338
+
"engines": {
339
+
"node": ">=18"
340
+
}
341
+
},
342
+
"node_modules/@esbuild/android-arm": {
343
+
"version": "0.27.0",
344
+
"resolved": "https://registry.npmjs.org/@esbuild/android-arm/-/android-arm-0.27.0.tgz",
345
+
"integrity": "sha512-j67aezrPNYWJEOHUNLPj9maeJte7uSMM6gMoxfPC9hOg8N02JuQi/T7ewumf4tNvJadFkvLZMlAq73b9uwdMyQ==",
346
+
"cpu": [
347
+
"arm"
348
+
],
349
+
"dev": true,
350
+
"license": "MIT",
351
+
"optional": true,
352
+
"os": [
353
+
"android"
354
+
],
355
+
"engines": {
356
+
"node": ">=18"
357
+
}
358
+
},
359
+
"node_modules/@esbuild/android-arm64": {
360
+
"version": "0.27.0",
361
+
"resolved": "https://registry.npmjs.org/@esbuild/android-arm64/-/android-arm64-0.27.0.tgz",
362
+
"integrity": "sha512-CC3vt4+1xZrs97/PKDkl0yN7w8edvU2vZvAFGD16n9F0Cvniy5qvzRXjfO1l94efczkkQE6g1x0i73Qf5uthOQ==",
363
+
"cpu": [
364
+
"arm64"
365
+
],
366
+
"dev": true,
367
+
"license": "MIT",
368
+
"optional": true,
369
+
"os": [
370
+
"android"
371
+
],
372
+
"engines": {
373
+
"node": ">=18"
374
+
}
375
+
},
376
+
"node_modules/@esbuild/android-x64": {
377
+
"version": "0.27.0",
378
+
"resolved": "https://registry.npmjs.org/@esbuild/android-x64/-/android-x64-0.27.0.tgz",
379
+
"integrity": "sha512-wurMkF1nmQajBO1+0CJmcN17U4BP6GqNSROP8t0X/Jiw2ltYGLHpEksp9MpoBqkrFR3kv2/te6Sha26k3+yZ9Q==",
380
+
"cpu": [
381
+
"x64"
382
+
],
383
+
"dev": true,
384
+
"license": "MIT",
385
+
"optional": true,
386
+
"os": [
387
+
"android"
388
+
],
389
+
"engines": {
390
+
"node": ">=18"
391
+
}
392
+
},
393
+
"node_modules/@esbuild/darwin-arm64": {
394
+
"version": "0.27.0",
395
+
"resolved": "https://registry.npmjs.org/@esbuild/darwin-arm64/-/darwin-arm64-0.27.0.tgz",
396
+
"integrity": "sha512-uJOQKYCcHhg07DL7i8MzjvS2LaP7W7Pn/7uA0B5S1EnqAirJtbyw4yC5jQ5qcFjHK9l6o/MX9QisBg12kNkdHg==",
397
+
"cpu": [
398
+
"arm64"
399
+
],
400
+
"dev": true,
401
+
"license": "MIT",
402
+
"optional": true,
403
+
"os": [
404
+
"darwin"
405
+
],
406
+
"engines": {
407
+
"node": ">=18"
408
+
}
409
+
},
410
+
"node_modules/@esbuild/darwin-x64": {
411
+
"version": "0.27.0",
412
+
"resolved": "https://registry.npmjs.org/@esbuild/darwin-x64/-/darwin-x64-0.27.0.tgz",
413
+
"integrity": "sha512-8mG6arH3yB/4ZXiEnXof5MK72dE6zM9cDvUcPtxhUZsDjESl9JipZYW60C3JGreKCEP+p8P/72r69m4AZGJd5g==",
414
+
"cpu": [
415
+
"x64"
416
+
],
417
+
"dev": true,
418
+
"license": "MIT",
419
+
"optional": true,
420
+
"os": [
421
+
"darwin"
422
+
],
423
+
"engines": {
424
+
"node": ">=18"
425
+
}
426
+
},
427
+
"node_modules/@esbuild/freebsd-arm64": {
428
+
"version": "0.27.0",
429
+
"resolved": "https://registry.npmjs.org/@esbuild/freebsd-arm64/-/freebsd-arm64-0.27.0.tgz",
430
+
"integrity": "sha512-9FHtyO988CwNMMOE3YIeci+UV+x5Zy8fI2qHNpsEtSF83YPBmE8UWmfYAQg6Ux7Gsmd4FejZqnEUZCMGaNQHQw==",
431
+
"cpu": [
432
+
"arm64"
433
+
],
434
+
"dev": true,
435
+
"license": "MIT",
436
+
"optional": true,
437
+
"os": [
438
+
"freebsd"
439
+
],
440
+
"engines": {
441
+
"node": ">=18"
442
+
}
443
+
},
444
+
"node_modules/@esbuild/freebsd-x64": {
445
+
"version": "0.27.0",
446
+
"resolved": "https://registry.npmjs.org/@esbuild/freebsd-x64/-/freebsd-x64-0.27.0.tgz",
447
+
"integrity": "sha512-zCMeMXI4HS/tXvJz8vWGexpZj2YVtRAihHLk1imZj4efx1BQzN76YFeKqlDr3bUWI26wHwLWPd3rwh6pe4EV7g==",
448
+
"cpu": [
449
+
"x64"
450
+
],
451
+
"dev": true,
452
+
"license": "MIT",
453
+
"optional": true,
454
+
"os": [
455
+
"freebsd"
456
+
],
457
+
"engines": {
458
+
"node": ">=18"
459
+
}
460
+
},
461
+
"node_modules/@esbuild/linux-arm": {
462
+
"version": "0.27.0",
463
+
"resolved": "https://registry.npmjs.org/@esbuild/linux-arm/-/linux-arm-0.27.0.tgz",
464
+
"integrity": "sha512-t76XLQDpxgmq2cNXKTVEB7O7YMb42atj2Re2Haf45HkaUpjM2J0UuJZDuaGbPbamzZ7bawyGFUkodL+zcE+jvQ==",
465
+
"cpu": [
466
+
"arm"
467
+
],
468
+
"dev": true,
469
+
"license": "MIT",
470
+
"optional": true,
471
+
"os": [
472
+
"linux"
473
+
],
474
+
"engines": {
475
+
"node": ">=18"
476
+
}
477
+
},
478
+
"node_modules/@esbuild/linux-arm64": {
479
+
"version": "0.27.0",
480
+
"resolved": "https://registry.npmjs.org/@esbuild/linux-arm64/-/linux-arm64-0.27.0.tgz",
481
+
"integrity": "sha512-AS18v0V+vZiLJyi/4LphvBE+OIX682Pu7ZYNsdUHyUKSoRwdnOsMf6FDekwoAFKej14WAkOef3zAORJgAtXnlQ==",
482
+
"cpu": [
483
+
"arm64"
484
+
],
485
+
"dev": true,
486
+
"license": "MIT",
487
+
"optional": true,
488
+
"os": [
489
+
"linux"
490
+
],
491
+
"engines": {
492
+
"node": ">=18"
493
+
}
494
+
},
495
+
"node_modules/@esbuild/linux-ia32": {
496
+
"version": "0.27.0",
497
+
"resolved": "https://registry.npmjs.org/@esbuild/linux-ia32/-/linux-ia32-0.27.0.tgz",
498
+
"integrity": "sha512-Mz1jxqm/kfgKkc/KLHC5qIujMvnnarD9ra1cEcrs7qshTUSksPihGrWHVG5+osAIQ68577Zpww7SGapmzSt4Nw==",
499
+
"cpu": [
500
+
"ia32"
501
+
],
502
+
"dev": true,
503
+
"license": "MIT",
504
+
"optional": true,
505
+
"os": [
506
+
"linux"
507
+
],
508
+
"engines": {
509
+
"node": ">=18"
510
+
}
511
+
},
512
+
"node_modules/@esbuild/linux-loong64": {
513
+
"version": "0.27.0",
514
+
"resolved": "https://registry.npmjs.org/@esbuild/linux-loong64/-/linux-loong64-0.27.0.tgz",
515
+
"integrity": "sha512-QbEREjdJeIreIAbdG2hLU1yXm1uu+LTdzoq1KCo4G4pFOLlvIspBm36QrQOar9LFduavoWX2msNFAAAY9j4BDg==",
516
+
"cpu": [
517
+
"loong64"
518
+
],
519
+
"dev": true,
520
+
"license": "MIT",
521
+
"optional": true,
522
+
"os": [
523
+
"linux"
524
+
],
525
+
"engines": {
526
+
"node": ">=18"
527
+
}
528
+
},
529
+
"node_modules/@esbuild/linux-mips64el": {
530
+
"version": "0.27.0",
531
+
"resolved": "https://registry.npmjs.org/@esbuild/linux-mips64el/-/linux-mips64el-0.27.0.tgz",
532
+
"integrity": "sha512-sJz3zRNe4tO2wxvDpH/HYJilb6+2YJxo/ZNbVdtFiKDufzWq4JmKAiHy9iGoLjAV7r/W32VgaHGkk35cUXlNOg==",
533
+
"cpu": [
534
+
"mips64el"
535
+
],
536
+
"dev": true,
537
+
"license": "MIT",
538
+
"optional": true,
539
+
"os": [
540
+
"linux"
541
+
],
542
+
"engines": {
543
+
"node": ">=18"
544
+
}
545
+
},
546
+
"node_modules/@esbuild/linux-ppc64": {
547
+
"version": "0.27.0",
548
+
"resolved": "https://registry.npmjs.org/@esbuild/linux-ppc64/-/linux-ppc64-0.27.0.tgz",
549
+
"integrity": "sha512-z9N10FBD0DCS2dmSABDBb5TLAyF1/ydVb+N4pi88T45efQ/w4ohr/F/QYCkxDPnkhkp6AIpIcQKQ8F0ANoA2JA==",
550
+
"cpu": [
551
+
"ppc64"
552
+
],
553
+
"dev": true,
554
+
"license": "MIT",
555
+
"optional": true,
556
+
"os": [
557
+
"linux"
558
+
],
559
+
"engines": {
560
+
"node": ">=18"
561
+
}
562
+
},
563
+
"node_modules/@esbuild/linux-riscv64": {
564
+
"version": "0.27.0",
565
+
"resolved": "https://registry.npmjs.org/@esbuild/linux-riscv64/-/linux-riscv64-0.27.0.tgz",
566
+
"integrity": "sha512-pQdyAIZ0BWIC5GyvVFn5awDiO14TkT/19FTmFcPdDec94KJ1uZcmFs21Fo8auMXzD4Tt+diXu1LW1gHus9fhFQ==",
567
+
"cpu": [
568
+
"riscv64"
569
+
],
570
+
"dev": true,
571
+
"license": "MIT",
572
+
"optional": true,
573
+
"os": [
574
+
"linux"
575
+
],
576
+
"engines": {
577
+
"node": ">=18"
578
+
}
579
+
},
580
+
"node_modules/@esbuild/linux-s390x": {
581
+
"version": "0.27.0",
582
+
"resolved": "https://registry.npmjs.org/@esbuild/linux-s390x/-/linux-s390x-0.27.0.tgz",
583
+
"integrity": "sha512-hPlRWR4eIDDEci953RI1BLZitgi5uqcsjKMxwYfmi4LcwyWo2IcRP+lThVnKjNtk90pLS8nKdroXYOqW+QQH+w==",
584
+
"cpu": [
585
+
"s390x"
586
+
],
587
+
"dev": true,
588
+
"license": "MIT",
589
+
"optional": true,
590
+
"os": [
591
+
"linux"
592
+
],
593
+
"engines": {
594
+
"node": ">=18"
595
+
}
596
+
},
597
+
"node_modules/@esbuild/linux-x64": {
598
+
"version": "0.27.0",
599
+
"resolved": "https://registry.npmjs.org/@esbuild/linux-x64/-/linux-x64-0.27.0.tgz",
600
+
"integrity": "sha512-1hBWx4OUJE2cab++aVZ7pObD6s+DK4mPGpemtnAORBvb5l/g5xFGk0vc0PjSkrDs0XaXj9yyob3d14XqvnQ4gw==",
601
+
"cpu": [
602
+
"x64"
603
+
],
604
+
"dev": true,
605
+
"license": "MIT",
606
+
"optional": true,
607
+
"os": [
608
+
"linux"
609
+
],
610
+
"engines": {
611
+
"node": ">=18"
612
+
}
613
+
},
614
+
"node_modules/@esbuild/netbsd-arm64": {
615
+
"version": "0.27.0",
616
+
"resolved": "https://registry.npmjs.org/@esbuild/netbsd-arm64/-/netbsd-arm64-0.27.0.tgz",
617
+
"integrity": "sha512-6m0sfQfxfQfy1qRuecMkJlf1cIzTOgyaeXaiVaaki8/v+WB+U4hc6ik15ZW6TAllRlg/WuQXxWj1jx6C+dfy3w==",
618
+
"cpu": [
619
+
"arm64"
620
+
],
621
+
"dev": true,
622
+
"license": "MIT",
623
+
"optional": true,
624
+
"os": [
625
+
"netbsd"
626
+
],
627
+
"engines": {
628
+
"node": ">=18"
629
+
}
630
+
},
631
+
"node_modules/@esbuild/netbsd-x64": {
632
+
"version": "0.27.0",
633
+
"resolved": "https://registry.npmjs.org/@esbuild/netbsd-x64/-/netbsd-x64-0.27.0.tgz",
634
+
"integrity": "sha512-xbbOdfn06FtcJ9d0ShxxvSn2iUsGd/lgPIO2V3VZIPDbEaIj1/3nBBe1AwuEZKXVXkMmpr6LUAgMkLD/4D2PPA==",
635
+
"cpu": [
636
+
"x64"
637
+
],
638
+
"dev": true,
639
+
"license": "MIT",
640
+
"optional": true,
641
+
"os": [
642
+
"netbsd"
643
+
],
644
+
"engines": {
645
+
"node": ">=18"
646
+
}
647
+
},
648
+
"node_modules/@esbuild/openbsd-arm64": {
649
+
"version": "0.27.0",
650
+
"resolved": "https://registry.npmjs.org/@esbuild/openbsd-arm64/-/openbsd-arm64-0.27.0.tgz",
651
+
"integrity": "sha512-fWgqR8uNbCQ/GGv0yhzttj6sU/9Z5/Sv/VGU3F5OuXK6J6SlriONKrQ7tNlwBrJZXRYk5jUhuWvF7GYzGguBZQ==",
652
+
"cpu": [
653
+
"arm64"
654
+
],
655
+
"dev": true,
656
+
"license": "MIT",
657
+
"optional": true,
658
+
"os": [
659
+
"openbsd"
660
+
],
661
+
"engines": {
662
+
"node": ">=18"
663
+
}
664
+
},
665
+
"node_modules/@esbuild/openbsd-x64": {
666
+
"version": "0.27.0",
667
+
"resolved": "https://registry.npmjs.org/@esbuild/openbsd-x64/-/openbsd-x64-0.27.0.tgz",
668
+
"integrity": "sha512-aCwlRdSNMNxkGGqQajMUza6uXzR/U0dIl1QmLjPtRbLOx3Gy3otfFu/VjATy4yQzo9yFDGTxYDo1FfAD9oRD2A==",
669
+
"cpu": [
670
+
"x64"
671
+
],
672
+
"dev": true,
673
+
"license": "MIT",
674
+
"optional": true,
675
+
"os": [
676
+
"openbsd"
677
+
],
678
+
"engines": {
679
+
"node": ">=18"
680
+
}
681
+
},
682
+
"node_modules/@esbuild/openharmony-arm64": {
683
+
"version": "0.27.0",
684
+
"resolved": "https://registry.npmjs.org/@esbuild/openharmony-arm64/-/openharmony-arm64-0.27.0.tgz",
685
+
"integrity": "sha512-nyvsBccxNAsNYz2jVFYwEGuRRomqZ149A39SHWk4hV0jWxKM0hjBPm3AmdxcbHiFLbBSwG6SbpIcUbXjgyECfA==",
686
+
"cpu": [
687
+
"arm64"
688
+
],
689
+
"dev": true,
690
+
"license": "MIT",
691
+
"optional": true,
692
+
"os": [
693
+
"openharmony"
694
+
],
695
+
"engines": {
696
+
"node": ">=18"
697
+
}
698
+
},
699
+
"node_modules/@esbuild/sunos-x64": {
700
+
"version": "0.27.0",
701
+
"resolved": "https://registry.npmjs.org/@esbuild/sunos-x64/-/sunos-x64-0.27.0.tgz",
702
+
"integrity": "sha512-Q1KY1iJafM+UX6CFEL+F4HRTgygmEW568YMqDA5UV97AuZSm21b7SXIrRJDwXWPzr8MGr75fUZPV67FdtMHlHA==",
703
+
"cpu": [
704
+
"x64"
705
+
],
706
+
"dev": true,
707
+
"license": "MIT",
708
+
"optional": true,
709
+
"os": [
710
+
"sunos"
711
+
],
712
+
"engines": {
713
+
"node": ">=18"
714
+
}
715
+
},
716
+
"node_modules/@esbuild/win32-arm64": {
717
+
"version": "0.27.0",
718
+
"resolved": "https://registry.npmjs.org/@esbuild/win32-arm64/-/win32-arm64-0.27.0.tgz",
719
+
"integrity": "sha512-W1eyGNi6d+8kOmZIwi/EDjrL9nxQIQ0MiGqe/AWc6+IaHloxHSGoeRgDRKHFISThLmsewZ5nHFvGFWdBYlgKPg==",
720
+
"cpu": [
721
+
"arm64"
722
+
],
723
+
"dev": true,
724
+
"license": "MIT",
725
+
"optional": true,
726
+
"os": [
727
+
"win32"
728
+
],
729
+
"engines": {
730
+
"node": ">=18"
731
+
}
732
+
},
733
+
"node_modules/@esbuild/win32-ia32": {
734
+
"version": "0.27.0",
735
+
"resolved": "https://registry.npmjs.org/@esbuild/win32-ia32/-/win32-ia32-0.27.0.tgz",
736
+
"integrity": "sha512-30z1aKL9h22kQhilnYkORFYt+3wp7yZsHWus+wSKAJR8JtdfI76LJ4SBdMsCopTR3z/ORqVu5L1vtnHZWVj4cQ==",
737
+
"cpu": [
738
+
"ia32"
739
+
],
740
+
"dev": true,
741
+
"license": "MIT",
742
+
"optional": true,
743
+
"os": [
744
+
"win32"
745
+
],
746
+
"engines": {
747
+
"node": ">=18"
748
+
}
749
+
},
750
+
"node_modules/@esbuild/win32-x64": {
751
+
"version": "0.27.0",
752
+
"resolved": "https://registry.npmjs.org/@esbuild/win32-x64/-/win32-x64-0.27.0.tgz",
753
+
"integrity": "sha512-aIitBcjQeyOhMTImhLZmtxfdOcuNRpwlPNmlFKPcHQYPhEssw75Cl1TSXJXpMkzaua9FUetx/4OQKq7eJul5Cg==",
754
+
"cpu": [
755
+
"x64"
756
+
],
757
+
"dev": true,
758
+
"license": "MIT",
759
+
"optional": true,
760
+
"os": [
761
+
"win32"
762
+
],
763
+
"engines": {
764
+
"node": ">=18"
765
+
}
766
+
},
767
+
"node_modules/@img/sharp-darwin-arm64": {
768
+
"version": "0.33.5",
769
+
"resolved": "https://registry.npmjs.org/@img/sharp-darwin-arm64/-/sharp-darwin-arm64-0.33.5.tgz",
770
+
"integrity": "sha512-UT4p+iz/2H4twwAoLCqfA9UH5pI6DggwKEGuaPy7nCVQ8ZsiY5PIcrRvD1DzuY3qYL07NtIQcWnBSY/heikIFQ==",
771
+
"cpu": [
772
+
"arm64"
773
+
],
774
+
"dev": true,
775
+
"license": "Apache-2.0",
776
+
"optional": true,
777
+
"os": [
778
+
"darwin"
779
+
],
780
+
"engines": {
781
+
"node": "^18.17.0 || ^20.3.0 || >=21.0.0"
782
+
},
783
+
"funding": {
784
+
"url": "https://opencollective.com/libvips"
785
+
},
786
+
"optionalDependencies": {
787
+
"@img/sharp-libvips-darwin-arm64": "1.0.4"
788
+
}
789
+
},
790
+
"node_modules/@img/sharp-darwin-x64": {
791
+
"version": "0.33.5",
792
+
"resolved": "https://registry.npmjs.org/@img/sharp-darwin-x64/-/sharp-darwin-x64-0.33.5.tgz",
793
+
"integrity": "sha512-fyHac4jIc1ANYGRDxtiqelIbdWkIuQaI84Mv45KvGRRxSAa7o7d1ZKAOBaYbnepLC1WqxfpimdeWfvqqSGwR2Q==",
794
+
"cpu": [
795
+
"x64"
796
+
],
797
+
"dev": true,
798
+
"license": "Apache-2.0",
799
+
"optional": true,
800
+
"os": [
801
+
"darwin"
802
+
],
803
+
"engines": {
804
+
"node": "^18.17.0 || ^20.3.0 || >=21.0.0"
805
+
},
806
+
"funding": {
807
+
"url": "https://opencollective.com/libvips"
808
+
},
809
+
"optionalDependencies": {
810
+
"@img/sharp-libvips-darwin-x64": "1.0.4"
811
+
}
812
+
},
813
+
"node_modules/@img/sharp-libvips-darwin-arm64": {
814
+
"version": "1.0.4",
815
+
"resolved": "https://registry.npmjs.org/@img/sharp-libvips-darwin-arm64/-/sharp-libvips-darwin-arm64-1.0.4.tgz",
816
+
"integrity": "sha512-XblONe153h0O2zuFfTAbQYAX2JhYmDHeWikp1LM9Hul9gVPjFY427k6dFEcOL72O01QxQsWi761svJ/ev9xEDg==",
817
+
"cpu": [
818
+
"arm64"
819
+
],
820
+
"dev": true,
821
+
"license": "LGPL-3.0-or-later",
822
+
"optional": true,
823
+
"os": [
824
+
"darwin"
825
+
],
826
+
"funding": {
827
+
"url": "https://opencollective.com/libvips"
828
+
}
829
+
},
830
+
"node_modules/@img/sharp-libvips-darwin-x64": {
831
+
"version": "1.0.4",
832
+
"resolved": "https://registry.npmjs.org/@img/sharp-libvips-darwin-x64/-/sharp-libvips-darwin-x64-1.0.4.tgz",
833
+
"integrity": "sha512-xnGR8YuZYfJGmWPvmlunFaWJsb9T/AO2ykoP3Fz/0X5XV2aoYBPkX6xqCQvUTKKiLddarLaxpzNe+b1hjeWHAQ==",
834
+
"cpu": [
835
+
"x64"
836
+
],
837
+
"dev": true,
838
+
"license": "LGPL-3.0-or-later",
839
+
"optional": true,
840
+
"os": [
841
+
"darwin"
842
+
],
843
+
"funding": {
844
+
"url": "https://opencollective.com/libvips"
845
+
}
846
+
},
847
+
"node_modules/@img/sharp-libvips-linux-arm": {
848
+
"version": "1.0.5",
849
+
"resolved": "https://registry.npmjs.org/@img/sharp-libvips-linux-arm/-/sharp-libvips-linux-arm-1.0.5.tgz",
850
+
"integrity": "sha512-gvcC4ACAOPRNATg/ov8/MnbxFDJqf/pDePbBnuBDcjsI8PssmjoKMAz4LtLaVi+OnSb5FK/yIOamqDwGmXW32g==",
851
+
"cpu": [
852
+
"arm"
853
+
],
854
+
"dev": true,
855
+
"license": "LGPL-3.0-or-later",
856
+
"optional": true,
857
+
"os": [
858
+
"linux"
859
+
],
860
+
"funding": {
861
+
"url": "https://opencollective.com/libvips"
862
+
}
863
+
},
864
+
"node_modules/@img/sharp-libvips-linux-arm64": {
865
+
"version": "1.0.4",
866
+
"resolved": "https://registry.npmjs.org/@img/sharp-libvips-linux-arm64/-/sharp-libvips-linux-arm64-1.0.4.tgz",
867
+
"integrity": "sha512-9B+taZ8DlyyqzZQnoeIvDVR/2F4EbMepXMc/NdVbkzsJbzkUjhXv/70GQJ7tdLA4YJgNP25zukcxpX2/SueNrA==",
868
+
"cpu": [
869
+
"arm64"
870
+
],
871
+
"dev": true,
872
+
"license": "LGPL-3.0-or-later",
873
+
"optional": true,
874
+
"os": [
875
+
"linux"
876
+
],
877
+
"funding": {
878
+
"url": "https://opencollective.com/libvips"
879
+
}
880
+
},
881
+
"node_modules/@img/sharp-libvips-linux-s390x": {
882
+
"version": "1.0.4",
883
+
"resolved": "https://registry.npmjs.org/@img/sharp-libvips-linux-s390x/-/sharp-libvips-linux-s390x-1.0.4.tgz",
884
+
"integrity": "sha512-u7Wz6ntiSSgGSGcjZ55im6uvTrOxSIS8/dgoVMoiGE9I6JAfU50yH5BoDlYA1tcuGS7g/QNtetJnxA6QEsCVTA==",
885
+
"cpu": [
886
+
"s390x"
887
+
],
888
+
"dev": true,
889
+
"license": "LGPL-3.0-or-later",
890
+
"optional": true,
891
+
"os": [
892
+
"linux"
893
+
],
894
+
"funding": {
895
+
"url": "https://opencollective.com/libvips"
896
+
}
897
+
},
898
+
"node_modules/@img/sharp-libvips-linux-x64": {
899
+
"version": "1.0.4",
900
+
"resolved": "https://registry.npmjs.org/@img/sharp-libvips-linux-x64/-/sharp-libvips-linux-x64-1.0.4.tgz",
901
+
"integrity": "sha512-MmWmQ3iPFZr0Iev+BAgVMb3ZyC4KeFc3jFxnNbEPas60e1cIfevbtuyf9nDGIzOaW9PdnDciJm+wFFaTlj5xYw==",
902
+
"cpu": [
903
+
"x64"
904
+
],
905
+
"dev": true,
906
+
"license": "LGPL-3.0-or-later",
907
+
"optional": true,
908
+
"os": [
909
+
"linux"
910
+
],
911
+
"funding": {
912
+
"url": "https://opencollective.com/libvips"
913
+
}
914
+
},
915
+
"node_modules/@img/sharp-libvips-linuxmusl-arm64": {
916
+
"version": "1.0.4",
917
+
"resolved": "https://registry.npmjs.org/@img/sharp-libvips-linuxmusl-arm64/-/sharp-libvips-linuxmusl-arm64-1.0.4.tgz",
918
+
"integrity": "sha512-9Ti+BbTYDcsbp4wfYib8Ctm1ilkugkA/uscUn6UXK1ldpC1JjiXbLfFZtRlBhjPZ5o1NCLiDbg8fhUPKStHoTA==",
919
+
"cpu": [
920
+
"arm64"
921
+
],
922
+
"dev": true,
923
+
"license": "LGPL-3.0-or-later",
924
+
"optional": true,
925
+
"os": [
926
+
"linux"
927
+
],
928
+
"funding": {
929
+
"url": "https://opencollective.com/libvips"
930
+
}
931
+
},
932
+
"node_modules/@img/sharp-libvips-linuxmusl-x64": {
933
+
"version": "1.0.4",
934
+
"resolved": "https://registry.npmjs.org/@img/sharp-libvips-linuxmusl-x64/-/sharp-libvips-linuxmusl-x64-1.0.4.tgz",
935
+
"integrity": "sha512-viYN1KX9m+/hGkJtvYYp+CCLgnJXwiQB39damAO7WMdKWlIhmYTfHjwSbQeUK/20vY154mwezd9HflVFM1wVSw==",
936
+
"cpu": [
937
+
"x64"
938
+
],
939
+
"dev": true,
940
+
"license": "LGPL-3.0-or-later",
941
+
"optional": true,
942
+
"os": [
943
+
"linux"
944
+
],
945
+
"funding": {
946
+
"url": "https://opencollective.com/libvips"
947
+
}
948
+
},
949
+
"node_modules/@img/sharp-linux-arm": {
950
+
"version": "0.33.5",
951
+
"resolved": "https://registry.npmjs.org/@img/sharp-linux-arm/-/sharp-linux-arm-0.33.5.tgz",
952
+
"integrity": "sha512-JTS1eldqZbJxjvKaAkxhZmBqPRGmxgu+qFKSInv8moZ2AmT5Yib3EQ1c6gp493HvrvV8QgdOXdyaIBrhvFhBMQ==",
953
+
"cpu": [
954
+
"arm"
955
+
],
956
+
"dev": true,
957
+
"license": "Apache-2.0",
958
+
"optional": true,
959
+
"os": [
960
+
"linux"
961
+
],
962
+
"engines": {
963
+
"node": "^18.17.0 || ^20.3.0 || >=21.0.0"
964
+
},
965
+
"funding": {
966
+
"url": "https://opencollective.com/libvips"
967
+
},
968
+
"optionalDependencies": {
969
+
"@img/sharp-libvips-linux-arm": "1.0.5"
970
+
}
971
+
},
972
+
"node_modules/@img/sharp-linux-arm64": {
973
+
"version": "0.33.5",
974
+
"resolved": "https://registry.npmjs.org/@img/sharp-linux-arm64/-/sharp-linux-arm64-0.33.5.tgz",
975
+
"integrity": "sha512-JMVv+AMRyGOHtO1RFBiJy/MBsgz0x4AWrT6QoEVVTyh1E39TrCUpTRI7mx9VksGX4awWASxqCYLCV4wBZHAYxA==",
976
+
"cpu": [
977
+
"arm64"
978
+
],
979
+
"dev": true,
980
+
"license": "Apache-2.0",
981
+
"optional": true,
982
+
"os": [
983
+
"linux"
984
+
],
985
+
"engines": {
986
+
"node": "^18.17.0 || ^20.3.0 || >=21.0.0"
987
+
},
988
+
"funding": {
989
+
"url": "https://opencollective.com/libvips"
990
+
},
991
+
"optionalDependencies": {
992
+
"@img/sharp-libvips-linux-arm64": "1.0.4"
993
+
}
994
+
},
995
+
"node_modules/@img/sharp-linux-s390x": {
996
+
"version": "0.33.5",
997
+
"resolved": "https://registry.npmjs.org/@img/sharp-linux-s390x/-/sharp-linux-s390x-0.33.5.tgz",
998
+
"integrity": "sha512-y/5PCd+mP4CA/sPDKl2961b+C9d+vPAveS33s6Z3zfASk2j5upL6fXVPZi7ztePZ5CuH+1kW8JtvxgbuXHRa4Q==",
999
+
"cpu": [
1000
+
"s390x"
1001
+
],
1002
+
"dev": true,
1003
+
"license": "Apache-2.0",
1004
+
"optional": true,
1005
+
"os": [
1006
+
"linux"
1007
+
],
1008
+
"engines": {
1009
+
"node": "^18.17.0 || ^20.3.0 || >=21.0.0"
1010
+
},
1011
+
"funding": {
1012
+
"url": "https://opencollective.com/libvips"
1013
+
},
1014
+
"optionalDependencies": {
1015
+
"@img/sharp-libvips-linux-s390x": "1.0.4"
1016
+
}
1017
+
},
1018
+
"node_modules/@img/sharp-linux-x64": {
1019
+
"version": "0.33.5",
1020
+
"resolved": "https://registry.npmjs.org/@img/sharp-linux-x64/-/sharp-linux-x64-0.33.5.tgz",
1021
+
"integrity": "sha512-opC+Ok5pRNAzuvq1AG0ar+1owsu842/Ab+4qvU879ippJBHvyY5n2mxF1izXqkPYlGuP/M556uh53jRLJmzTWA==",
1022
+
"cpu": [
1023
+
"x64"
1024
+
],
1025
+
"dev": true,
1026
+
"license": "Apache-2.0",
1027
+
"optional": true,
1028
+
"os": [
1029
+
"linux"
1030
+
],
1031
+
"engines": {
1032
+
"node": "^18.17.0 || ^20.3.0 || >=21.0.0"
1033
+
},
1034
+
"funding": {
1035
+
"url": "https://opencollective.com/libvips"
1036
+
},
1037
+
"optionalDependencies": {
1038
+
"@img/sharp-libvips-linux-x64": "1.0.4"
1039
+
}
1040
+
},
1041
+
"node_modules/@img/sharp-linuxmusl-arm64": {
1042
+
"version": "0.33.5",
1043
+
"resolved": "https://registry.npmjs.org/@img/sharp-linuxmusl-arm64/-/sharp-linuxmusl-arm64-0.33.5.tgz",
1044
+
"integrity": "sha512-XrHMZwGQGvJg2V/oRSUfSAfjfPxO+4DkiRh6p2AFjLQztWUuY/o8Mq0eMQVIY7HJ1CDQUJlxGGZRw1a5bqmd1g==",
1045
+
"cpu": [
1046
+
"arm64"
1047
+
],
1048
+
"dev": true,
1049
+
"license": "Apache-2.0",
1050
+
"optional": true,
1051
+
"os": [
1052
+
"linux"
1053
+
],
1054
+
"engines": {
1055
+
"node": "^18.17.0 || ^20.3.0 || >=21.0.0"
1056
+
},
1057
+
"funding": {
1058
+
"url": "https://opencollective.com/libvips"
1059
+
},
1060
+
"optionalDependencies": {
1061
+
"@img/sharp-libvips-linuxmusl-arm64": "1.0.4"
1062
+
}
1063
+
},
1064
+
"node_modules/@img/sharp-linuxmusl-x64": {
1065
+
"version": "0.33.5",
1066
+
"resolved": "https://registry.npmjs.org/@img/sharp-linuxmusl-x64/-/sharp-linuxmusl-x64-0.33.5.tgz",
1067
+
"integrity": "sha512-WT+d/cgqKkkKySYmqoZ8y3pxx7lx9vVejxW/W4DOFMYVSkErR+w7mf2u8m/y4+xHe7yY9DAXQMWQhpnMuFfScw==",
1068
+
"cpu": [
1069
+
"x64"
1070
+
],
1071
+
"dev": true,
1072
+
"license": "Apache-2.0",
1073
+
"optional": true,
1074
+
"os": [
1075
+
"linux"
1076
+
],
1077
+
"engines": {
1078
+
"node": "^18.17.0 || ^20.3.0 || >=21.0.0"
1079
+
},
1080
+
"funding": {
1081
+
"url": "https://opencollective.com/libvips"
1082
+
},
1083
+
"optionalDependencies": {
1084
+
"@img/sharp-libvips-linuxmusl-x64": "1.0.4"
1085
+
}
1086
+
},
1087
+
"node_modules/@img/sharp-wasm32": {
1088
+
"version": "0.33.5",
1089
+
"resolved": "https://registry.npmjs.org/@img/sharp-wasm32/-/sharp-wasm32-0.33.5.tgz",
1090
+
"integrity": "sha512-ykUW4LVGaMcU9lu9thv85CbRMAwfeadCJHRsg2GmeRa/cJxsVY9Rbd57JcMxBkKHag5U/x7TSBpScF4U8ElVzg==",
1091
+
"cpu": [
1092
+
"wasm32"
1093
+
],
1094
+
"dev": true,
1095
+
"license": "Apache-2.0 AND LGPL-3.0-or-later AND MIT",
1096
+
"optional": true,
1097
+
"dependencies": {
1098
+
"@emnapi/runtime": "^1.2.0"
1099
+
},
1100
+
"engines": {
1101
+
"node": "^18.17.0 || ^20.3.0 || >=21.0.0"
1102
+
},
1103
+
"funding": {
1104
+
"url": "https://opencollective.com/libvips"
1105
+
}
1106
+
},
1107
+
"node_modules/@img/sharp-win32-ia32": {
1108
+
"version": "0.33.5",
1109
+
"resolved": "https://registry.npmjs.org/@img/sharp-win32-ia32/-/sharp-win32-ia32-0.33.5.tgz",
1110
+
"integrity": "sha512-T36PblLaTwuVJ/zw/LaH0PdZkRz5rd3SmMHX8GSmR7vtNSP5Z6bQkExdSK7xGWyxLw4sUknBuugTelgw2faBbQ==",
1111
+
"cpu": [
1112
+
"ia32"
1113
+
],
1114
+
"dev": true,
1115
+
"license": "Apache-2.0 AND LGPL-3.0-or-later",
1116
+
"optional": true,
1117
+
"os": [
1118
+
"win32"
1119
+
],
1120
+
"engines": {
1121
+
"node": "^18.17.0 || ^20.3.0 || >=21.0.0"
1122
+
},
1123
+
"funding": {
1124
+
"url": "https://opencollective.com/libvips"
1125
+
}
1126
+
},
1127
+
"node_modules/@img/sharp-win32-x64": {
1128
+
"version": "0.33.5",
1129
+
"resolved": "https://registry.npmjs.org/@img/sharp-win32-x64/-/sharp-win32-x64-0.33.5.tgz",
1130
+
"integrity": "sha512-MpY/o8/8kj+EcnxwvrP4aTJSWw/aZ7JIGR4aBeZkZw5B7/Jn+tY9/VNwtcoGmdT7GfggGIU4kygOMSbYnOrAbg==",
1131
+
"cpu": [
1132
+
"x64"
1133
+
],
1134
+
"dev": true,
1135
+
"license": "Apache-2.0 AND LGPL-3.0-or-later",
1136
+
"optional": true,
1137
+
"os": [
1138
+
"win32"
1139
+
],
1140
+
"engines": {
1141
+
"node": "^18.17.0 || ^20.3.0 || >=21.0.0"
1142
+
},
1143
+
"funding": {
1144
+
"url": "https://opencollective.com/libvips"
1145
+
}
1146
+
},
1147
+
"node_modules/@jridgewell/resolve-uri": {
1148
+
"version": "3.1.2",
1149
+
"resolved": "https://registry.npmjs.org/@jridgewell/resolve-uri/-/resolve-uri-3.1.2.tgz",
1150
+
"integrity": "sha512-bRISgCIjP20/tbWSPWMEi54QVPRZExkuD9lJL+UIxUKtwVJA8wW1Trb1jMs1RFXo1CBTNZ/5hpC9QvmKWdopKw==",
1151
+
"dev": true,
1152
+
"license": "MIT",
1153
+
"engines": {
1154
+
"node": ">=6.0.0"
1155
+
}
1156
+
},
1157
+
"node_modules/@jridgewell/sourcemap-codec": {
1158
+
"version": "1.5.5",
1159
+
"resolved": "https://registry.npmjs.org/@jridgewell/sourcemap-codec/-/sourcemap-codec-1.5.5.tgz",
1160
+
"integrity": "sha512-cYQ9310grqxueWbl+WuIUIaiUaDcj7WOq5fVhEljNVgRfOUhY9fy2zTvfoqWsnebh8Sl70VScFbICvJnLKB0Og==",
1161
+
"dev": true,
1162
+
"license": "MIT"
1163
+
},
1164
+
"node_modules/@jridgewell/trace-mapping": {
1165
+
"version": "0.3.9",
1166
+
"resolved": "https://registry.npmjs.org/@jridgewell/trace-mapping/-/trace-mapping-0.3.9.tgz",
1167
+
"integrity": "sha512-3Belt6tdc8bPgAtbcmdtNJlirVoTmEb5e2gC94PnkwEW9jI6CAHUeoG85tjWP5WquqfavoMtMwiG4P926ZKKuQ==",
1168
+
"dev": true,
1169
+
"license": "MIT",
1170
+
"dependencies": {
1171
+
"@jridgewell/resolve-uri": "^3.0.3",
1172
+
"@jridgewell/sourcemap-codec": "^1.4.10"
1173
+
}
1174
+
},
1175
+
"node_modules/@poppinss/colors": {
1176
+
"version": "4.1.6",
1177
+
"resolved": "https://registry.npmjs.org/@poppinss/colors/-/colors-4.1.6.tgz",
1178
+
"integrity": "sha512-H9xkIdFswbS8n1d6vmRd8+c10t2Qe+rZITbbDHHkQixH5+2x1FDGmi/0K+WgWiqQFKPSlIYB7jlH6Kpfn6Fleg==",
1179
+
"dev": true,
1180
+
"license": "MIT",
1181
+
"dependencies": {
1182
+
"kleur": "^4.1.5"
1183
+
}
1184
+
},
1185
+
"node_modules/@poppinss/dumper": {
1186
+
"version": "0.6.5",
1187
+
"resolved": "https://registry.npmjs.org/@poppinss/dumper/-/dumper-0.6.5.tgz",
1188
+
"integrity": "sha512-NBdYIb90J7LfOI32dOewKI1r7wnkiH6m920puQ3qHUeZkxNkQiFnXVWoE6YtFSv6QOiPPf7ys6i+HWWecDz7sw==",
1189
+
"dev": true,
1190
+
"license": "MIT",
1191
+
"dependencies": {
1192
+
"@poppinss/colors": "^4.1.5",
1193
+
"@sindresorhus/is": "^7.0.2",
1194
+
"supports-color": "^10.0.0"
1195
+
}
1196
+
},
1197
+
"node_modules/@poppinss/exception": {
1198
+
"version": "1.2.3",
1199
+
"resolved": "https://registry.npmjs.org/@poppinss/exception/-/exception-1.2.3.tgz",
1200
+
"integrity": "sha512-dCED+QRChTVatE9ibtoaxc+WkdzOSjYTKi/+uacHWIsfodVfpsueo3+DKpgU5Px8qXjgmXkSvhXvSCz3fnP9lw==",
1201
+
"dev": true,
1202
+
"license": "MIT"
1203
+
},
1204
+
"node_modules/@sindresorhus/is": {
1205
+
"version": "7.2.0",
1206
+
"resolved": "https://registry.npmjs.org/@sindresorhus/is/-/is-7.2.0.tgz",
1207
+
"integrity": "sha512-P1Cz1dWaFfR4IR+U13mqqiGsLFf1KbayybWwdd2vfctdV6hDpUkgCY0nKOLLTMSoRd/jJNjtbqzf13K8DCCXQw==",
1208
+
"dev": true,
1209
+
"license": "MIT",
1210
+
"engines": {
1211
+
"node": ">=18"
1212
+
},
1213
+
"funding": {
1214
+
"url": "https://github.com/sindresorhus/is?sponsor=1"
1215
+
}
1216
+
},
1217
+
"node_modules/@speed-highlight/core": {
1218
+
"version": "1.2.14",
1219
+
"resolved": "https://registry.npmjs.org/@speed-highlight/core/-/core-1.2.14.tgz",
1220
+
"integrity": "sha512-G4ewlBNhUtlLvrJTb88d2mdy2KRijzs4UhnlrOSRT4bmjh/IqNElZa3zkrZ+TC47TwtlDWzVLFADljF1Ijp5hA==",
1221
+
"dev": true,
1222
+
"license": "CC0-1.0"
1223
+
},
1224
+
"node_modules/acorn": {
1225
+
"version": "8.14.0",
1226
+
"resolved": "https://registry.npmjs.org/acorn/-/acorn-8.14.0.tgz",
1227
+
"integrity": "sha512-cl669nCJTZBsL97OF4kUQm5g5hC2uihk0NxY3WENAC0TYdILVkAyHymAntgxGkl7K+t0cXIrH5siy5S4XkFycA==",
1228
+
"dev": true,
1229
+
"license": "MIT",
1230
+
"bin": {
1231
+
"acorn": "bin/acorn"
1232
+
},
1233
+
"engines": {
1234
+
"node": ">=0.4.0"
1235
+
}
1236
+
},
1237
+
"node_modules/acorn-walk": {
1238
+
"version": "8.3.2",
1239
+
"resolved": "https://registry.npmjs.org/acorn-walk/-/acorn-walk-8.3.2.tgz",
1240
+
"integrity": "sha512-cjkyv4OtNCIeqhHrfS81QWXoCBPExR/J62oyEqepVw8WaQeSqpW2uhuLPh1m9eWhDuOo/jUXVTlifvesOWp/4A==",
1241
+
"dev": true,
1242
+
"license": "MIT",
1243
+
"engines": {
1244
+
"node": ">=0.4.0"
1245
+
}
1246
+
},
1247
+
"node_modules/blake3-wasm": {
1248
+
"version": "2.1.5",
1249
+
"resolved": "https://registry.npmjs.org/blake3-wasm/-/blake3-wasm-2.1.5.tgz",
1250
+
"integrity": "sha512-F1+K8EbfOZE49dtoPtmxUQrpXaBIl3ICvasLh+nJta0xkz+9kF/7uet9fLnwKqhDrmj6g+6K3Tw9yQPUg2ka5g==",
1251
+
"dev": true,
1252
+
"license": "MIT"
1253
+
},
1254
+
"node_modules/color": {
1255
+
"version": "4.2.3",
1256
+
"resolved": "https://registry.npmjs.org/color/-/color-4.2.3.tgz",
1257
+
"integrity": "sha512-1rXeuUUiGGrykh+CeBdu5Ie7OJwinCgQY0bc7GCRxy5xVHy+moaqkpL/jqQq0MtQOeYcrqEz4abc5f0KtU7W4A==",
1258
+
"dev": true,
1259
+
"license": "MIT",
1260
+
"dependencies": {
1261
+
"color-convert": "^2.0.1",
1262
+
"color-string": "^1.9.0"
1263
+
},
1264
+
"engines": {
1265
+
"node": ">=12.5.0"
1266
+
}
1267
+
},
1268
+
"node_modules/color-convert": {
1269
+
"version": "2.0.1",
1270
+
"resolved": "https://registry.npmjs.org/color-convert/-/color-convert-2.0.1.tgz",
1271
+
"integrity": "sha512-RRECPsj7iu/xb5oKYcsFHSppFNnsj/52OVTRKb4zP5onXwVF3zVmmToNcOfGC+CRDpfK/U584fMg38ZHCaElKQ==",
1272
+
"dev": true,
1273
+
"license": "MIT",
1274
+
"dependencies": {
1275
+
"color-name": "~1.1.4"
1276
+
},
1277
+
"engines": {
1278
+
"node": ">=7.0.0"
1279
+
}
1280
+
},
1281
+
"node_modules/color-name": {
1282
+
"version": "1.1.4",
1283
+
"resolved": "https://registry.npmjs.org/color-name/-/color-name-1.1.4.tgz",
1284
+
"integrity": "sha512-dOy+3AuW3a2wNbZHIuMZpTcgjGuLU/uBL/ubcZF9OXbDo8ff4O8yVp5Bf0efS8uEoYo5q4Fx7dY9OgQGXgAsQA==",
1285
+
"dev": true,
1286
+
"license": "MIT"
1287
+
},
1288
+
"node_modules/color-string": {
1289
+
"version": "1.9.1",
1290
+
"resolved": "https://registry.npmjs.org/color-string/-/color-string-1.9.1.tgz",
1291
+
"integrity": "sha512-shrVawQFojnZv6xM40anx4CkoDP+fZsw/ZerEMsW/pyzsRbElpsL/DBVW7q3ExxwusdNXI3lXpuhEZkzs8p5Eg==",
1292
+
"dev": true,
1293
+
"license": "MIT",
1294
+
"dependencies": {
1295
+
"color-name": "^1.0.0",
1296
+
"simple-swizzle": "^0.2.2"
1297
+
}
1298
+
},
1299
+
"node_modules/cookie": {
1300
+
"version": "1.1.1",
1301
+
"resolved": "https://registry.npmjs.org/cookie/-/cookie-1.1.1.tgz",
1302
+
"integrity": "sha512-ei8Aos7ja0weRpFzJnEA9UHJ/7XQmqglbRwnf2ATjcB9Wq874VKH9kfjjirM6UhU2/E5fFYadylyhFldcqSidQ==",
1303
+
"dev": true,
1304
+
"license": "MIT",
1305
+
"engines": {
1306
+
"node": ">=18"
1307
+
},
1308
+
"funding": {
1309
+
"type": "opencollective",
1310
+
"url": "https://opencollective.com/express"
1311
+
}
1312
+
},
1313
+
"node_modules/detect-libc": {
1314
+
"version": "2.1.2",
1315
+
"resolved": "https://registry.npmjs.org/detect-libc/-/detect-libc-2.1.2.tgz",
1316
+
"integrity": "sha512-Btj2BOOO83o3WyH59e8MgXsxEQVcarkUOpEYrubB0urwnN10yQ364rsiByU11nZlqWYZm05i/of7io4mzihBtQ==",
1317
+
"dev": true,
1318
+
"license": "Apache-2.0",
1319
+
"engines": {
1320
+
"node": ">=8"
1321
+
}
1322
+
},
1323
+
"node_modules/error-stack-parser-es": {
1324
+
"version": "1.0.5",
1325
+
"resolved": "https://registry.npmjs.org/error-stack-parser-es/-/error-stack-parser-es-1.0.5.tgz",
1326
+
"integrity": "sha512-5qucVt2XcuGMcEGgWI7i+yZpmpByQ8J1lHhcL7PwqCwu9FPP3VUXzT4ltHe5i2z9dePwEHcDVOAfSnHsOlCXRA==",
1327
+
"dev": true,
1328
+
"license": "MIT",
1329
+
"funding": {
1330
+
"url": "https://github.com/sponsors/antfu"
1331
+
}
1332
+
},
1333
+
"node_modules/esbuild": {
1334
+
"version": "0.27.0",
1335
+
"resolved": "https://registry.npmjs.org/esbuild/-/esbuild-0.27.0.tgz",
1336
+
"integrity": "sha512-jd0f4NHbD6cALCyGElNpGAOtWxSq46l9X/sWB0Nzd5er4Kz2YTm+Vl0qKFT9KUJvD8+fiO8AvoHhFvEatfVixA==",
1337
+
"dev": true,
1338
+
"hasInstallScript": true,
1339
+
"license": "MIT",
1340
+
"bin": {
1341
+
"esbuild": "bin/esbuild"
1342
+
},
1343
+
"engines": {
1344
+
"node": ">=18"
1345
+
},
1346
+
"optionalDependencies": {
1347
+
"@esbuild/aix-ppc64": "0.27.0",
1348
+
"@esbuild/android-arm": "0.27.0",
1349
+
"@esbuild/android-arm64": "0.27.0",
1350
+
"@esbuild/android-x64": "0.27.0",
1351
+
"@esbuild/darwin-arm64": "0.27.0",
1352
+
"@esbuild/darwin-x64": "0.27.0",
1353
+
"@esbuild/freebsd-arm64": "0.27.0",
1354
+
"@esbuild/freebsd-x64": "0.27.0",
1355
+
"@esbuild/linux-arm": "0.27.0",
1356
+
"@esbuild/linux-arm64": "0.27.0",
1357
+
"@esbuild/linux-ia32": "0.27.0",
1358
+
"@esbuild/linux-loong64": "0.27.0",
1359
+
"@esbuild/linux-mips64el": "0.27.0",
1360
+
"@esbuild/linux-ppc64": "0.27.0",
1361
+
"@esbuild/linux-riscv64": "0.27.0",
1362
+
"@esbuild/linux-s390x": "0.27.0",
1363
+
"@esbuild/linux-x64": "0.27.0",
1364
+
"@esbuild/netbsd-arm64": "0.27.0",
1365
+
"@esbuild/netbsd-x64": "0.27.0",
1366
+
"@esbuild/openbsd-arm64": "0.27.0",
1367
+
"@esbuild/openbsd-x64": "0.27.0",
1368
+
"@esbuild/openharmony-arm64": "0.27.0",
1369
+
"@esbuild/sunos-x64": "0.27.0",
1370
+
"@esbuild/win32-arm64": "0.27.0",
1371
+
"@esbuild/win32-ia32": "0.27.0",
1372
+
"@esbuild/win32-x64": "0.27.0"
1373
+
}
1374
+
},
1375
+
"node_modules/exit-hook": {
1376
+
"version": "2.2.1",
1377
+
"resolved": "https://registry.npmjs.org/exit-hook/-/exit-hook-2.2.1.tgz",
1378
+
"integrity": "sha512-eNTPlAD67BmP31LDINZ3U7HSF8l57TxOY2PmBJ1shpCvpnxBF93mWCE8YHBnXs8qiUZJc9WDcWIeC3a2HIAMfw==",
1379
+
"dev": true,
1380
+
"license": "MIT",
1381
+
"engines": {
1382
+
"node": ">=6"
1383
+
},
1384
+
"funding": {
1385
+
"url": "https://github.com/sponsors/sindresorhus"
1386
+
}
1387
+
},
1388
+
"node_modules/fsevents": {
1389
+
"version": "2.3.3",
1390
+
"resolved": "https://registry.npmjs.org/fsevents/-/fsevents-2.3.3.tgz",
1391
+
"integrity": "sha512-5xoDfX+fL7faATnagmWPpbFtwh/R77WmMMqqHGS65C3vvB0YHrgF+B1YmZ3441tMj5n63k0212XNoJwzlhffQw==",
1392
+
"dev": true,
1393
+
"hasInstallScript": true,
1394
+
"license": "MIT",
1395
+
"optional": true,
1396
+
"os": [
1397
+
"darwin"
1398
+
],
1399
+
"engines": {
1400
+
"node": "^8.16.0 || ^10.6.0 || >=11.0.0"
1401
+
}
1402
+
},
1403
+
"node_modules/glob-to-regexp": {
1404
+
"version": "0.4.1",
1405
+
"resolved": "https://registry.npmjs.org/glob-to-regexp/-/glob-to-regexp-0.4.1.tgz",
1406
+
"integrity": "sha512-lkX1HJXwyMcprw/5YUZc2s7DrpAiHB21/V+E1rHUrVNokkvB6bqMzT0VfV6/86ZNabt1k14YOIaT7nDvOX3Iiw==",
1407
+
"dev": true,
1408
+
"license": "BSD-2-Clause"
1409
+
},
1410
+
"node_modules/is-arrayish": {
1411
+
"version": "0.3.4",
1412
+
"resolved": "https://registry.npmjs.org/is-arrayish/-/is-arrayish-0.3.4.tgz",
1413
+
"integrity": "sha512-m6UrgzFVUYawGBh1dUsWR5M2Clqic9RVXC/9f8ceNlv2IcO9j9J/z8UoCLPqtsPBFNzEpfR3xftohbfqDx8EQA==",
1414
+
"dev": true,
1415
+
"license": "MIT"
1416
+
},
1417
+
"node_modules/kleur": {
1418
+
"version": "4.1.5",
1419
+
"resolved": "https://registry.npmjs.org/kleur/-/kleur-4.1.5.tgz",
1420
+
"integrity": "sha512-o+NO+8WrRiQEE4/7nwRJhN1HWpVmJm511pBHUxPLtp0BUISzlBplORYSmTclCnJvQq2tKu/sgl3xVpkc7ZWuQQ==",
1421
+
"dev": true,
1422
+
"license": "MIT",
1423
+
"engines": {
1424
+
"node": ">=6"
1425
+
}
1426
+
},
1427
+
"node_modules/mime": {
1428
+
"version": "3.0.0",
1429
+
"resolved": "https://registry.npmjs.org/mime/-/mime-3.0.0.tgz",
1430
+
"integrity": "sha512-jSCU7/VB1loIWBZe14aEYHU/+1UMEHoaO7qxCOVJOw9GgH72VAWppxNcjU+x9a2k3GSIBXNKxXQFqRvvZ7vr3A==",
1431
+
"dev": true,
1432
+
"license": "MIT",
1433
+
"bin": {
1434
+
"mime": "cli.js"
1435
+
},
1436
+
"engines": {
1437
+
"node": ">=10.0.0"
1438
+
}
1439
+
},
1440
+
"node_modules/miniflare": {
1441
+
"version": "4.20251210.0",
1442
+
"resolved": "https://registry.npmjs.org/miniflare/-/miniflare-4.20251210.0.tgz",
1443
+
"integrity": "sha512-k6kIoXwGVqlPZb0hcn+X7BmnK+8BjIIkusQPY22kCo2RaQJ/LzAjtxHQdGXerlHSnJyQivDQsL6BJHMpQfUFyw==",
1444
+
"dev": true,
1445
+
"license": "MIT",
1446
+
"dependencies": {
1447
+
"@cspotcode/source-map-support": "0.8.1",
1448
+
"acorn": "8.14.0",
1449
+
"acorn-walk": "8.3.2",
1450
+
"exit-hook": "2.2.1",
1451
+
"glob-to-regexp": "0.4.1",
1452
+
"sharp": "^0.33.5",
1453
+
"stoppable": "1.1.0",
1454
+
"undici": "7.14.0",
1455
+
"workerd": "1.20251210.0",
1456
+
"ws": "8.18.0",
1457
+
"youch": "4.1.0-beta.10",
1458
+
"zod": "3.22.3"
1459
+
},
1460
+
"bin": {
1461
+
"miniflare": "bootstrap.js"
1462
+
},
1463
+
"engines": {
1464
+
"node": ">=18.0.0"
1465
+
}
1466
+
},
1467
+
"node_modules/path-to-regexp": {
1468
+
"version": "6.3.0",
1469
+
"resolved": "https://registry.npmjs.org/path-to-regexp/-/path-to-regexp-6.3.0.tgz",
1470
+
"integrity": "sha512-Yhpw4T9C6hPpgPeA28us07OJeqZ5EzQTkbfwuhsUg0c237RomFoETJgmp2sa3F/41gfLE6G5cqcYwznmeEeOlQ==",
1471
+
"dev": true,
1472
+
"license": "MIT"
1473
+
},
1474
+
"node_modules/pathe": {
1475
+
"version": "2.0.3",
1476
+
"resolved": "https://registry.npmjs.org/pathe/-/pathe-2.0.3.tgz",
1477
+
"integrity": "sha512-WUjGcAqP1gQacoQe+OBJsFA7Ld4DyXuUIjZ5cc75cLHvJ7dtNsTugphxIADwspS+AraAUePCKrSVtPLFj/F88w==",
1478
+
"dev": true,
1479
+
"license": "MIT"
1480
+
},
1481
+
"node_modules/semver": {
1482
+
"version": "7.7.3",
1483
+
"resolved": "https://registry.npmjs.org/semver/-/semver-7.7.3.tgz",
1484
+
"integrity": "sha512-SdsKMrI9TdgjdweUSR9MweHA4EJ8YxHn8DFaDisvhVlUOe4BF1tLD7GAj0lIqWVl+dPb/rExr0Btby5loQm20Q==",
1485
+
"dev": true,
1486
+
"license": "ISC",
1487
+
"bin": {
1488
+
"semver": "bin/semver.js"
1489
+
},
1490
+
"engines": {
1491
+
"node": ">=10"
1492
+
}
1493
+
},
1494
+
"node_modules/sharp": {
1495
+
"version": "0.33.5",
1496
+
"resolved": "https://registry.npmjs.org/sharp/-/sharp-0.33.5.tgz",
1497
+
"integrity": "sha512-haPVm1EkS9pgvHrQ/F3Xy+hgcuMV0Wm9vfIBSiwZ05k+xgb0PkBQpGsAA/oWdDobNaZTH5ppvHtzCFbnSEwHVw==",
1498
+
"dev": true,
1499
+
"hasInstallScript": true,
1500
+
"license": "Apache-2.0",
1501
+
"dependencies": {
1502
+
"color": "^4.2.3",
1503
+
"detect-libc": "^2.0.3",
1504
+
"semver": "^7.6.3"
1505
+
},
1506
+
"engines": {
1507
+
"node": "^18.17.0 || ^20.3.0 || >=21.0.0"
1508
+
},
1509
+
"funding": {
1510
+
"url": "https://opencollective.com/libvips"
1511
+
},
1512
+
"optionalDependencies": {
1513
+
"@img/sharp-darwin-arm64": "0.33.5",
1514
+
"@img/sharp-darwin-x64": "0.33.5",
1515
+
"@img/sharp-libvips-darwin-arm64": "1.0.4",
1516
+
"@img/sharp-libvips-darwin-x64": "1.0.4",
1517
+
"@img/sharp-libvips-linux-arm": "1.0.5",
1518
+
"@img/sharp-libvips-linux-arm64": "1.0.4",
1519
+
"@img/sharp-libvips-linux-s390x": "1.0.4",
1520
+
"@img/sharp-libvips-linux-x64": "1.0.4",
1521
+
"@img/sharp-libvips-linuxmusl-arm64": "1.0.4",
1522
+
"@img/sharp-libvips-linuxmusl-x64": "1.0.4",
1523
+
"@img/sharp-linux-arm": "0.33.5",
1524
+
"@img/sharp-linux-arm64": "0.33.5",
1525
+
"@img/sharp-linux-s390x": "0.33.5",
1526
+
"@img/sharp-linux-x64": "0.33.5",
1527
+
"@img/sharp-linuxmusl-arm64": "0.33.5",
1528
+
"@img/sharp-linuxmusl-x64": "0.33.5",
1529
+
"@img/sharp-wasm32": "0.33.5",
1530
+
"@img/sharp-win32-ia32": "0.33.5",
1531
+
"@img/sharp-win32-x64": "0.33.5"
1532
+
}
1533
+
},
1534
+
"node_modules/simple-swizzle": {
1535
+
"version": "0.2.4",
1536
+
"resolved": "https://registry.npmjs.org/simple-swizzle/-/simple-swizzle-0.2.4.tgz",
1537
+
"integrity": "sha512-nAu1WFPQSMNr2Zn9PGSZK9AGn4t/y97lEm+MXTtUDwfP0ksAIX4nO+6ruD9Jwut4C49SB1Ws+fbXsm/yScWOHw==",
1538
+
"dev": true,
1539
+
"license": "MIT",
1540
+
"dependencies": {
1541
+
"is-arrayish": "^0.3.1"
1542
+
}
1543
+
},
1544
+
"node_modules/stoppable": {
1545
+
"version": "1.1.0",
1546
+
"resolved": "https://registry.npmjs.org/stoppable/-/stoppable-1.1.0.tgz",
1547
+
"integrity": "sha512-KXDYZ9dszj6bzvnEMRYvxgeTHU74QBFL54XKtP3nyMuJ81CFYtABZ3bAzL2EdFUaEwJOBOgENyFj3R7oTzDyyw==",
1548
+
"dev": true,
1549
+
"license": "MIT",
1550
+
"engines": {
1551
+
"node": ">=4",
1552
+
"npm": ">=6"
1553
+
}
1554
+
},
1555
+
"node_modules/supports-color": {
1556
+
"version": "10.2.2",
1557
+
"resolved": "https://registry.npmjs.org/supports-color/-/supports-color-10.2.2.tgz",
1558
+
"integrity": "sha512-SS+jx45GF1QjgEXQx4NJZV9ImqmO2NPz5FNsIHrsDjh2YsHnawpan7SNQ1o8NuhrbHZy9AZhIoCUiCeaW/C80g==",
1559
+
"dev": true,
1560
+
"license": "MIT",
1561
+
"engines": {
1562
+
"node": ">=18"
1563
+
},
1564
+
"funding": {
1565
+
"url": "https://github.com/chalk/supports-color?sponsor=1"
1566
+
}
1567
+
},
1568
+
"node_modules/tslib": {
1569
+
"version": "2.8.1",
1570
+
"resolved": "https://registry.npmjs.org/tslib/-/tslib-2.8.1.tgz",
1571
+
"integrity": "sha512-oJFu94HQb+KVduSUQL7wnpmqnfmLsOA/nAh6b6EH0wCEoK0/mPeXU6c3wKDV83MkOuHPRHtSXKKU99IBazS/2w==",
1572
+
"dev": true,
1573
+
"license": "0BSD",
1574
+
"optional": true
1575
+
},
1576
+
"node_modules/typescript": {
1577
+
"version": "5.9.3",
1578
+
"resolved": "https://registry.npmjs.org/typescript/-/typescript-5.9.3.tgz",
1579
+
"integrity": "sha512-jl1vZzPDinLr9eUt3J/t7V6FgNEw9QjvBPdysz9KfQDD41fQrC2Y4vKQdiaUpFT4bXlb1RHhLpp8wtm6M5TgSw==",
1580
+
"dev": true,
1581
+
"license": "Apache-2.0",
1582
+
"bin": {
1583
+
"tsc": "bin/tsc",
1584
+
"tsserver": "bin/tsserver"
1585
+
},
1586
+
"engines": {
1587
+
"node": ">=14.17"
1588
+
}
1589
+
},
1590
+
"node_modules/undici": {
1591
+
"version": "7.14.0",
1592
+
"resolved": "https://registry.npmjs.org/undici/-/undici-7.14.0.tgz",
1593
+
"integrity": "sha512-Vqs8HTzjpQXZeXdpsfChQTlafcMQaaIwnGwLam1wudSSjlJeQ3bw1j+TLPePgrCnCpUXx7Ba5Pdpf5OBih62NQ==",
1594
+
"dev": true,
1595
+
"license": "MIT",
1596
+
"engines": {
1597
+
"node": ">=20.18.1"
1598
+
}
1599
+
},
1600
+
"node_modules/unenv": {
1601
+
"version": "2.0.0-rc.24",
1602
+
"resolved": "https://registry.npmjs.org/unenv/-/unenv-2.0.0-rc.24.tgz",
1603
+
"integrity": "sha512-i7qRCmY42zmCwnYlh9H2SvLEypEFGye5iRmEMKjcGi7zk9UquigRjFtTLz0TYqr0ZGLZhaMHl/foy1bZR+Cwlw==",
1604
+
"dev": true,
1605
+
"license": "MIT",
1606
+
"dependencies": {
1607
+
"pathe": "^2.0.3"
1608
+
}
1609
+
},
1610
+
"node_modules/workerd": {
1611
+
"version": "1.20251210.0",
1612
+
"resolved": "https://registry.npmjs.org/workerd/-/workerd-1.20251210.0.tgz",
1613
+
"integrity": "sha512-9MUUneP1BnRE9XAYi94FXxHmiLGbO75EHQZsgWqSiOXjoXSqJCw8aQbIEPxCy19TclEl/kHUFYce8ST2W+Qpjw==",
1614
+
"dev": true,
1615
+
"hasInstallScript": true,
1616
+
"license": "Apache-2.0",
1617
+
"bin": {
1618
+
"workerd": "bin/workerd"
1619
+
},
1620
+
"engines": {
1621
+
"node": ">=16"
1622
+
},
1623
+
"optionalDependencies": {
1624
+
"@cloudflare/workerd-darwin-64": "1.20251210.0",
1625
+
"@cloudflare/workerd-darwin-arm64": "1.20251210.0",
1626
+
"@cloudflare/workerd-linux-64": "1.20251210.0",
1627
+
"@cloudflare/workerd-linux-arm64": "1.20251210.0",
1628
+
"@cloudflare/workerd-windows-64": "1.20251210.0"
1629
+
}
1630
+
},
1631
+
"node_modules/wrangler": {
1632
+
"version": "4.54.0",
1633
+
"resolved": "https://registry.npmjs.org/wrangler/-/wrangler-4.54.0.tgz",
1634
+
"integrity": "sha512-bANFsjDwJLbprYoBK+hUDZsVbUv2SqJd8QvArLIcZk+fPq4h/Ohtj5vkKXD3k0s2bD1DXLk08D+hYmeNH+xC6A==",
1635
+
"dev": true,
1636
+
"license": "MIT OR Apache-2.0",
1637
+
"dependencies": {
1638
+
"@cloudflare/kv-asset-handler": "0.4.1",
1639
+
"@cloudflare/unenv-preset": "2.7.13",
1640
+
"blake3-wasm": "2.1.5",
1641
+
"esbuild": "0.27.0",
1642
+
"miniflare": "4.20251210.0",
1643
+
"path-to-regexp": "6.3.0",
1644
+
"unenv": "2.0.0-rc.24",
1645
+
"workerd": "1.20251210.0"
1646
+
},
1647
+
"bin": {
1648
+
"wrangler": "bin/wrangler.js",
1649
+
"wrangler2": "bin/wrangler.js"
1650
+
},
1651
+
"engines": {
1652
+
"node": ">=20.0.0"
1653
+
},
1654
+
"optionalDependencies": {
1655
+
"fsevents": "~2.3.2"
1656
+
},
1657
+
"peerDependencies": {
1658
+
"@cloudflare/workers-types": "^4.20251210.0"
1659
+
},
1660
+
"peerDependenciesMeta": {
1661
+
"@cloudflare/workers-types": {
1662
+
"optional": true
1663
+
}
1664
+
}
1665
+
},
1666
+
"node_modules/ws": {
1667
+
"version": "8.18.0",
1668
+
"resolved": "https://registry.npmjs.org/ws/-/ws-8.18.0.tgz",
1669
+
"integrity": "sha512-8VbfWfHLbbwu3+N6OKsOMpBdT4kXPDDB9cJk2bJ6mh9ucxdlnNvH1e+roYkKmN9Nxw2yjz7VzeO9oOz2zJ04Pw==",
1670
+
"dev": true,
1671
+
"license": "MIT",
1672
+
"engines": {
1673
+
"node": ">=10.0.0"
1674
+
},
1675
+
"peerDependencies": {
1676
+
"bufferutil": "^4.0.1",
1677
+
"utf-8-validate": ">=5.0.2"
1678
+
},
1679
+
"peerDependenciesMeta": {
1680
+
"bufferutil": {
1681
+
"optional": true
1682
+
},
1683
+
"utf-8-validate": {
1684
+
"optional": true
1685
+
}
1686
+
}
1687
+
},
1688
+
"node_modules/youch": {
1689
+
"version": "4.1.0-beta.10",
1690
+
"resolved": "https://registry.npmjs.org/youch/-/youch-4.1.0-beta.10.tgz",
1691
+
"integrity": "sha512-rLfVLB4FgQneDr0dv1oddCVZmKjcJ6yX6mS4pU82Mq/Dt9a3cLZQ62pDBL4AUO+uVrCvtWz3ZFUL2HFAFJ/BXQ==",
1692
+
"dev": true,
1693
+
"license": "MIT",
1694
+
"dependencies": {
1695
+
"@poppinss/colors": "^4.1.5",
1696
+
"@poppinss/dumper": "^0.6.4",
1697
+
"@speed-highlight/core": "^1.2.7",
1698
+
"cookie": "^1.0.2",
1699
+
"youch-core": "^0.3.3"
1700
+
}
1701
+
},
1702
+
"node_modules/youch-core": {
1703
+
"version": "0.3.3",
1704
+
"resolved": "https://registry.npmjs.org/youch-core/-/youch-core-0.3.3.tgz",
1705
+
"integrity": "sha512-ho7XuGjLaJ2hWHoK8yFnsUGy2Y5uDpqSTq1FkHLK4/oqKtyUU1AFbOOxY4IpC9f0fTLjwYbslUz0Po5BpD1wrA==",
1706
+
"dev": true,
1707
+
"license": "MIT",
1708
+
"dependencies": {
1709
+
"@poppinss/exception": "^1.2.2",
1710
+
"error-stack-parser-es": "^1.0.5"
1711
+
}
1712
+
},
1713
+
"node_modules/zod": {
1714
+
"version": "3.22.3",
1715
+
"resolved": "https://registry.npmjs.org/zod/-/zod-3.22.3.tgz",
1716
+
"integrity": "sha512-EjIevzuJRiRPbVH4mGc8nApb/lVLKVpmUhAaR5R5doKGfAnGJ6Gr3CViAVjP+4FWSxCsybeWQdcgCtbX+7oZug==",
1717
+
"dev": true,
1718
+
"license": "MIT",
1719
+
"funding": {
1720
+
"url": "https://github.com/sponsors/colinhacks"
1721
+
}
10
1722
}
11
1723
}
12
1724
}
+17
-5
package.json
+17
-5
package.json
···
1
1
{
2
-
"name": "cloudflare-pds",
3
-
"version": "0.1.0",
2
+
"name": "pds.js",
3
+
"version": "0.6.0",
4
4
"private": true,
5
5
"type": "module",
6
6
"scripts": {
7
-
"dev": "wrangler dev",
7
+
"dev": "wrangler dev --persist-to .wrangler/state",
8
+
"dev:remote": "wrangler dev --remote",
8
9
"deploy": "wrangler deploy",
9
-
"test": "node --test test/*.test.js",
10
-
"setup": "node scripts/setup.js"
10
+
"test": "node --test test/pds.test.js",
11
+
"test:e2e": "node --test test/e2e.test.js",
12
+
"setup": "node scripts/setup.js",
13
+
"format": "biome format --write .",
14
+
"lint": "biome lint .",
15
+
"check": "biome check .",
16
+
"typecheck": "tsc --noEmit"
17
+
},
18
+
"devDependencies": {
19
+
"@biomejs/biome": "^2.3.11",
20
+
"@cloudflare/workers-types": "^4.20260103.0",
21
+
"typescript": "^5.9.3",
22
+
"wrangler": "^4.54.0"
11
23
}
12
24
}
+182
-326
scripts/setup.js
+182
-326
scripts/setup.js
···
4
4
* PDS Setup Script
5
5
*
6
6
* Registers a did:plc, initializes the PDS, and notifies the relay.
7
-
* Zero dependencies - uses Node.js built-ins only.
8
7
*
9
8
* Usage: node scripts/setup.js --handle alice --pds https://your-pds.workers.dev
10
9
*/
11
10
12
-
import { webcrypto } from 'crypto'
13
-
import { writeFileSync } from 'fs'
11
+
import { writeFileSync } from 'node:fs';
12
+
import {
13
+
base32Encode,
14
+
base64UrlEncode,
15
+
bytesToHex,
16
+
cborEncodeDagCbor,
17
+
generateKeyPair,
18
+
importPrivateKey,
19
+
sign,
20
+
} from '../src/pds.js';
14
21
15
22
// === ARGUMENT PARSING ===
16
23
17
24
function parseArgs() {
18
-
const args = process.argv.slice(2)
25
+
const args = process.argv.slice(2);
19
26
const opts = {
20
27
handle: null,
21
28
pds: null,
22
29
plcUrl: 'https://plc.directory',
23
-
relayUrl: 'https://bsky.network'
24
-
}
30
+
relayUrl: 'https://bsky.network',
31
+
};
25
32
26
33
for (let i = 0; i < args.length; i++) {
27
34
if (args[i] === '--handle' && args[i + 1]) {
28
-
opts.handle = args[++i]
35
+
opts.handle = args[++i];
29
36
} else if (args[i] === '--pds' && args[i + 1]) {
30
-
opts.pds = args[++i]
37
+
opts.pds = args[++i];
31
38
} else if (args[i] === '--plc-url' && args[i + 1]) {
32
-
opts.plcUrl = args[++i]
39
+
opts.plcUrl = args[++i];
33
40
} else if (args[i] === '--relay-url' && args[i + 1]) {
34
-
opts.relayUrl = args[++i]
41
+
opts.relayUrl = args[++i];
35
42
}
36
43
}
37
44
38
-
if (!opts.handle || !opts.pds) {
39
-
console.error('Usage: node scripts/setup.js --handle <handle> --pds <pds-url>')
40
-
console.error('')
41
-
console.error('Options:')
42
-
console.error(' --handle Handle name (e.g., "alice")')
43
-
console.error(' --pds PDS URL (e.g., "https://atproto-pds.chad-53c.workers.dev")')
44
-
console.error(' --plc-url PLC directory URL (default: https://plc.directory)')
45
-
console.error(' --relay-url Relay URL (default: https://bsky.network)')
46
-
process.exit(1)
45
+
if (!opts.pds) {
46
+
console.error(
47
+
'Usage: node scripts/setup.js --pds <pds-url> [--handle <subdomain>]',
48
+
);
49
+
console.error('');
50
+
console.error('Options:');
51
+
console.error(
52
+
' --pds PDS URL (e.g., "https://atproto-pds.chad-53c.workers.dev")',
53
+
);
54
+
console.error(
55
+
' --handle Subdomain handle (e.g., "alice") - optional, uses bare hostname if omitted',
56
+
);
57
+
console.error(
58
+
' --plc-url PLC directory URL (default: https://plc.directory)',
59
+
);
60
+
console.error(' --relay-url Relay URL (default: https://bsky.network)');
61
+
process.exit(1);
47
62
}
48
63
49
-
return opts
50
-
}
51
-
52
-
// === KEY GENERATION ===
53
-
54
-
async function generateP256Keypair() {
55
-
const keyPair = await webcrypto.subtle.generateKey(
56
-
{ name: 'ECDSA', namedCurve: 'P-256' },
57
-
true,
58
-
['sign', 'verify']
59
-
)
60
-
61
-
// Export private key as raw 32 bytes
62
-
const privateJwk = await webcrypto.subtle.exportKey('jwk', keyPair.privateKey)
63
-
const privateBytes = base64UrlDecode(privateJwk.d)
64
-
65
-
// Export public key as uncompressed point (65 bytes)
66
-
const publicRaw = await webcrypto.subtle.exportKey('raw', keyPair.publicKey)
67
-
const publicBytes = new Uint8Array(publicRaw)
68
-
69
-
// Compress public key to 33 bytes
70
-
const compressedPublic = compressPublicKey(publicBytes)
71
-
72
-
return {
73
-
privateKey: privateBytes,
74
-
publicKey: compressedPublic,
75
-
cryptoKey: keyPair.privateKey
76
-
}
77
-
}
78
-
79
-
function compressPublicKey(uncompressed) {
80
-
// uncompressed is 65 bytes: 0x04 + x(32) + y(32)
81
-
const x = uncompressed.slice(1, 33)
82
-
const y = uncompressed.slice(33, 65)
83
-
const prefix = (y[31] & 1) === 0 ? 0x02 : 0x03
84
-
const compressed = new Uint8Array(33)
85
-
compressed[0] = prefix
86
-
compressed.set(x, 1)
87
-
return compressed
88
-
}
89
-
90
-
function base64UrlDecode(str) {
91
-
const base64 = str.replace(/-/g, '+').replace(/_/g, '/')
92
-
const binary = atob(base64)
93
-
const bytes = new Uint8Array(binary.length)
94
-
for (let i = 0; i < binary.length; i++) {
95
-
bytes[i] = binary.charCodeAt(i)
96
-
}
97
-
return bytes
98
-
}
99
-
100
-
function bytesToHex(bytes) {
101
-
return Array.from(bytes).map(b => b.toString(16).padStart(2, '0')).join('')
64
+
return opts;
102
65
}
103
66
104
67
// === DID:KEY ENCODING ===
105
68
106
69
// Multicodec prefix for P-256 public key (0x1200)
107
-
const P256_MULTICODEC = new Uint8Array([0x80, 0x24])
70
+
const P256_MULTICODEC = new Uint8Array([0x80, 0x24]);
108
71
109
72
function publicKeyToDidKey(compressedPublicKey) {
110
73
// did:key format: "did:key:" + multibase(base58btc) of multicodec + key
111
-
const keyWithCodec = new Uint8Array(P256_MULTICODEC.length + compressedPublicKey.length)
112
-
keyWithCodec.set(P256_MULTICODEC)
113
-
keyWithCodec.set(compressedPublicKey, P256_MULTICODEC.length)
74
+
const keyWithCodec = new Uint8Array(
75
+
P256_MULTICODEC.length + compressedPublicKey.length,
76
+
);
77
+
keyWithCodec.set(P256_MULTICODEC);
78
+
keyWithCodec.set(compressedPublicKey, P256_MULTICODEC.length);
114
79
115
-
return 'did:key:z' + base58btcEncode(keyWithCodec)
80
+
return `did:key:z${base58btcEncode(keyWithCodec)}`;
116
81
}
117
82
118
83
function base58btcEncode(bytes) {
119
-
const ALPHABET = '123456789ABCDEFGHJKLMNPQRSTUVWXYZabcdefghijkmnopqrstuvwxyz'
84
+
const ALPHABET = '123456789ABCDEFGHJKLMNPQRSTUVWXYZabcdefghijkmnopqrstuvwxyz';
120
85
121
86
// Count leading zeros
122
-
let zeros = 0
87
+
let zeros = 0;
123
88
for (const b of bytes) {
124
-
if (b === 0) zeros++
125
-
else break
89
+
if (b === 0) zeros++;
90
+
else break;
126
91
}
127
92
128
93
// Convert to base58
129
-
const digits = [0]
94
+
const digits = [0];
130
95
for (const byte of bytes) {
131
-
let carry = byte
96
+
let carry = byte;
132
97
for (let i = 0; i < digits.length; i++) {
133
-
carry += digits[i] << 8
134
-
digits[i] = carry % 58
135
-
carry = (carry / 58) | 0
98
+
carry += digits[i] << 8;
99
+
digits[i] = carry % 58;
100
+
carry = (carry / 58) | 0;
136
101
}
137
102
while (carry > 0) {
138
-
digits.push(carry % 58)
139
-
carry = (carry / 58) | 0
103
+
digits.push(carry % 58);
104
+
carry = (carry / 58) | 0;
140
105
}
141
106
}
142
107
143
108
// Convert to string
144
-
let result = '1'.repeat(zeros)
109
+
let result = '1'.repeat(zeros);
145
110
for (let i = digits.length - 1; i >= 0; i--) {
146
-
result += ALPHABET[digits[i]]
147
-
}
148
-
149
-
return result
150
-
}
151
-
152
-
// === CBOR ENCODING (dag-cbor compliant for PLC operations) ===
153
-
154
-
function cborEncodeKey(key) {
155
-
// Encode a string key to CBOR bytes (for sorting)
156
-
const bytes = new TextEncoder().encode(key)
157
-
const parts = []
158
-
const mt = 3 << 5 // major type 3 = text string
159
-
if (bytes.length < 24) {
160
-
parts.push(mt | bytes.length)
161
-
} else if (bytes.length < 256) {
162
-
parts.push(mt | 24, bytes.length)
163
-
} else if (bytes.length < 65536) {
164
-
parts.push(mt | 25, bytes.length >> 8, bytes.length & 0xff)
111
+
result += ALPHABET[digits[i]];
165
112
}
166
-
parts.push(...bytes)
167
-
return new Uint8Array(parts)
168
-
}
169
113
170
-
function compareBytes(a, b) {
171
-
// dag-cbor: bytewise lexicographic order of encoded keys
172
-
const minLen = Math.min(a.length, b.length)
173
-
for (let i = 0; i < minLen; i++) {
174
-
if (a[i] !== b[i]) return a[i] - b[i]
175
-
}
176
-
return a.length - b.length
177
-
}
178
-
179
-
function cborEncode(value) {
180
-
const parts = []
181
-
182
-
function encode(val) {
183
-
if (val === null) {
184
-
parts.push(0xf6)
185
-
} else if (typeof val === 'string') {
186
-
const bytes = new TextEncoder().encode(val)
187
-
encodeHead(3, bytes.length)
188
-
parts.push(...bytes)
189
-
} else if (typeof val === 'number') {
190
-
if (Number.isInteger(val) && val >= 0) {
191
-
encodeHead(0, val)
192
-
}
193
-
} else if (val instanceof Uint8Array) {
194
-
encodeHead(2, val.length)
195
-
parts.push(...val)
196
-
} else if (Array.isArray(val)) {
197
-
encodeHead(4, val.length)
198
-
for (const item of val) encode(item)
199
-
} else if (typeof val === 'object') {
200
-
// dag-cbor: sort keys by their CBOR-encoded bytes (length first, then lexicographic)
201
-
const keys = Object.keys(val)
202
-
const keysSorted = keys.sort((a, b) => compareBytes(cborEncodeKey(a), cborEncodeKey(b)))
203
-
encodeHead(5, keysSorted.length)
204
-
for (const key of keysSorted) {
205
-
encode(key)
206
-
encode(val[key])
207
-
}
208
-
}
209
-
}
210
-
211
-
function encodeHead(majorType, length) {
212
-
const mt = majorType << 5
213
-
if (length < 24) {
214
-
parts.push(mt | length)
215
-
} else if (length < 256) {
216
-
parts.push(mt | 24, length)
217
-
} else if (length < 65536) {
218
-
parts.push(mt | 25, length >> 8, length & 0xff)
219
-
}
220
-
}
221
-
222
-
encode(value)
223
-
return new Uint8Array(parts)
114
+
return result;
224
115
}
225
116
226
117
// === HASHING ===
227
118
228
119
async function sha256(data) {
229
-
const hash = await webcrypto.subtle.digest('SHA-256', data)
230
-
return new Uint8Array(hash)
120
+
const hash = await crypto.subtle.digest('SHA-256', data);
121
+
return new Uint8Array(hash);
231
122
}
232
123
233
124
// === PLC OPERATIONS ===
234
125
235
-
async function signPlcOperation(operation, privateKey) {
126
+
async function signPlcOperation(operation, cryptoKey) {
236
127
// Encode operation without sig field
237
-
const { sig, ...opWithoutSig } = operation
238
-
const encoded = cborEncode(opWithoutSig)
239
-
240
-
// Sign with P-256
241
-
const signature = await webcrypto.subtle.sign(
242
-
{ name: 'ECDSA', hash: 'SHA-256' },
243
-
privateKey,
244
-
encoded
245
-
)
246
-
247
-
// Convert to low-S form and base64url encode
248
-
const sigBytes = ensureLowS(new Uint8Array(signature))
249
-
return base64UrlEncode(sigBytes)
250
-
}
251
-
252
-
function ensureLowS(sig) {
253
-
// P-256 order N
254
-
const N = BigInt('0xFFFFFFFF00000000FFFFFFFFFFFFFFFFBCE6FAADA7179E84F3B9CAC2FC632551')
255
-
const halfN = N / 2n
256
-
257
-
const r = sig.slice(0, 32)
258
-
const s = sig.slice(32, 64)
259
-
260
-
// Convert s to BigInt
261
-
let sInt = BigInt('0x' + bytesToHex(s))
262
-
263
-
// If s > N/2, replace with N - s
264
-
if (sInt > halfN) {
265
-
sInt = N - sInt
266
-
const newS = hexToBytes(sInt.toString(16).padStart(64, '0'))
267
-
const result = new Uint8Array(64)
268
-
result.set(r)
269
-
result.set(newS, 32)
270
-
return result
271
-
}
272
-
273
-
return sig
274
-
}
275
-
276
-
function hexToBytes(hex) {
277
-
const bytes = new Uint8Array(hex.length / 2)
278
-
for (let i = 0; i < hex.length; i += 2) {
279
-
bytes[i / 2] = parseInt(hex.substr(i, 2), 16)
280
-
}
281
-
return bytes
282
-
}
128
+
const { sig, ...opWithoutSig } = operation;
129
+
const encoded = cborEncodeDagCbor(opWithoutSig);
283
130
284
-
function base64UrlEncode(bytes) {
285
-
const binary = String.fromCharCode(...bytes)
286
-
return btoa(binary).replace(/\+/g, '-').replace(/\//g, '_').replace(/=+$/, '')
131
+
// Sign with P-256 (sign() handles low-S normalization)
132
+
const signature = await sign(cryptoKey, encoded);
133
+
return base64UrlEncode(signature);
287
134
}
288
135
289
136
async function createGenesisOperation(opts) {
290
-
const { didKey, handle, pdsUrl, cryptoKey } = opts
137
+
const { didKey, handle, pdsUrl, cryptoKey } = opts;
291
138
292
-
// Build the full handle
293
-
const pdsHost = new URL(pdsUrl).host
294
-
const fullHandle = `${handle}.${pdsHost}`
139
+
// Build full handle: subdomain.pds-hostname, or just pds-hostname if no subdomain
140
+
const pdsHost = new URL(pdsUrl).host;
141
+
const fullHandle = handle ? `${handle}.${pdsHost}` : pdsHost;
295
142
296
143
const operation = {
297
144
type: 'plc_operation',
298
145
rotationKeys: [didKey],
299
146
verificationMethods: {
300
-
atproto: didKey
147
+
atproto: didKey,
301
148
},
302
149
alsoKnownAs: [`at://${fullHandle}`],
303
150
services: {
304
151
atproto_pds: {
305
152
type: 'AtprotoPersonalDataServer',
306
-
endpoint: pdsUrl
307
-
}
153
+
endpoint: pdsUrl,
154
+
},
308
155
},
309
-
prev: null
310
-
}
156
+
prev: null,
157
+
};
311
158
312
159
// Sign the operation
313
-
operation.sig = await signPlcOperation(operation, cryptoKey)
160
+
operation.sig = await signPlcOperation(operation, cryptoKey);
314
161
315
-
return { operation, fullHandle }
162
+
return { operation, fullHandle };
316
163
}
317
164
318
165
async function deriveDidFromOperation(operation) {
319
166
// DID is computed from the FULL operation INCLUDING the signature
320
-
const encoded = cborEncode(operation)
321
-
const hash = await sha256(encoded)
167
+
const encoded = cborEncodeDagCbor(operation);
168
+
const hash = await sha256(encoded);
322
169
// DID is base32 of first 15 bytes of hash (= 24 base32 chars)
323
-
return 'did:plc:' + base32Encode(hash.slice(0, 15))
324
-
}
325
-
326
-
function base32Encode(bytes) {
327
-
const alphabet = 'abcdefghijklmnopqrstuvwxyz234567'
328
-
let result = ''
329
-
let bits = 0
330
-
let value = 0
331
-
332
-
for (const byte of bytes) {
333
-
value = (value << 8) | byte
334
-
bits += 8
335
-
while (bits >= 5) {
336
-
bits -= 5
337
-
result += alphabet[(value >> bits) & 31]
338
-
}
339
-
}
340
-
341
-
if (bits > 0) {
342
-
result += alphabet[(value << (5 - bits)) & 31]
343
-
}
344
-
345
-
return result
170
+
return `did:plc:${base32Encode(hash.slice(0, 15))}`;
346
171
}
347
172
348
173
// === PLC DIRECTORY REGISTRATION ===
349
174
350
175
async function registerWithPlc(plcUrl, did, operation) {
351
-
const url = `${plcUrl}/${encodeURIComponent(did)}`
176
+
const url = `${plcUrl}/${encodeURIComponent(did)}`;
352
177
353
178
const response = await fetch(url, {
354
179
method: 'POST',
355
180
headers: {
356
-
'Content-Type': 'application/json'
181
+
'Content-Type': 'application/json',
357
182
},
358
-
body: JSON.stringify(operation)
359
-
})
183
+
body: JSON.stringify(operation),
184
+
});
360
185
361
186
if (!response.ok) {
362
-
const text = await response.text()
363
-
throw new Error(`PLC registration failed: ${response.status} ${text}`)
187
+
const text = await response.text();
188
+
throw new Error(`PLC registration failed: ${response.status} ${text}`);
364
189
}
365
190
366
-
return true
191
+
return true;
367
192
}
368
193
369
194
// === PDS INITIALIZATION ===
370
195
371
196
async function initializePds(pdsUrl, did, privateKeyHex, handle) {
372
-
const url = `${pdsUrl}/init?did=${encodeURIComponent(did)}`
197
+
const url = `${pdsUrl}/init?did=${encodeURIComponent(did)}`;
373
198
374
199
const response = await fetch(url, {
375
200
method: 'POST',
376
201
headers: {
377
-
'Content-Type': 'application/json'
202
+
'Content-Type': 'application/json',
378
203
},
379
204
body: JSON.stringify({
380
205
did,
381
206
privateKey: privateKeyHex,
382
-
handle
383
-
})
384
-
})
207
+
handle,
208
+
}),
209
+
});
385
210
386
211
if (!response.ok) {
387
-
const text = await response.text()
388
-
throw new Error(`PDS initialization failed: ${response.status} ${text}`)
212
+
const text = await response.text();
213
+
throw new Error(`PDS initialization failed: ${response.status} ${text}`);
389
214
}
390
215
391
-
return response.json()
216
+
return response.json();
217
+
}
218
+
219
+
// === HANDLE REGISTRATION ===
220
+
221
+
async function registerHandle(pdsUrl, handle, did) {
222
+
const url = `${pdsUrl}/register-handle`;
223
+
224
+
const response = await fetch(url, {
225
+
method: 'POST',
226
+
headers: {
227
+
'Content-Type': 'application/json',
228
+
},
229
+
body: JSON.stringify({ handle, did }),
230
+
});
231
+
232
+
if (!response.ok) {
233
+
const text = await response.text();
234
+
throw new Error(`Handle registration failed: ${response.status} ${text}`);
235
+
}
236
+
237
+
return true;
392
238
}
393
239
394
240
// === RELAY NOTIFICATION ===
395
241
396
242
async function notifyRelay(relayUrl, pdsHostname) {
397
-
const url = `${relayUrl}/xrpc/com.atproto.sync.requestCrawl`
243
+
const url = `${relayUrl}/xrpc/com.atproto.sync.requestCrawl`;
398
244
399
245
const response = await fetch(url, {
400
246
method: 'POST',
401
247
headers: {
402
-
'Content-Type': 'application/json'
248
+
'Content-Type': 'application/json',
403
249
},
404
250
body: JSON.stringify({
405
-
hostname: pdsHostname
406
-
})
407
-
})
251
+
hostname: pdsHostname,
252
+
}),
253
+
});
408
254
409
255
// Relay might return 200 or 202, both are OK
410
256
if (!response.ok && response.status !== 202) {
411
-
const text = await response.text()
412
-
console.warn(` Warning: Relay notification returned ${response.status}: ${text}`)
413
-
return false
257
+
const text = await response.text();
258
+
console.warn(
259
+
` Warning: Relay notification returned ${response.status}: ${text}`,
260
+
);
261
+
return false;
414
262
}
415
263
416
-
return true
264
+
return true;
417
265
}
418
266
419
267
// === CREDENTIALS OUTPUT ===
420
268
421
269
function saveCredentials(filename, credentials) {
422
-
writeFileSync(filename, JSON.stringify(credentials, null, 2))
270
+
writeFileSync(filename, JSON.stringify(credentials, null, 2));
423
271
}
424
272
425
273
// === MAIN ===
426
274
427
275
async function main() {
428
-
const opts = parseArgs()
276
+
const opts = parseArgs();
429
277
430
-
console.log('PDS Federation Setup')
431
-
console.log('====================')
432
-
console.log(`Handle: ${opts.handle}`)
433
-
console.log(`PDS: ${opts.pds}`)
434
-
console.log('')
278
+
console.log('PDS Federation Setup');
279
+
console.log('====================');
280
+
console.log(`PDS: ${opts.pds}`);
281
+
console.log('');
435
282
436
283
// Step 1: Generate keypair
437
-
console.log('Generating P-256 keypair...')
438
-
const keyPair = await generateP256Keypair()
439
-
const didKey = publicKeyToDidKey(keyPair.publicKey)
440
-
console.log(` did:key: ${didKey}`)
441
-
console.log('')
284
+
console.log('Generating P-256 keypair...');
285
+
const keyPair = await generateKeyPair();
286
+
const cryptoKey = await importPrivateKey(keyPair.privateKey);
287
+
const didKey = publicKeyToDidKey(keyPair.publicKey);
288
+
console.log(` did:key: ${didKey}`);
289
+
console.log('');
442
290
443
291
// Step 2: Create genesis operation
444
-
console.log('Creating PLC genesis operation...')
292
+
console.log('Creating PLC genesis operation...');
445
293
const { operation, fullHandle } = await createGenesisOperation({
446
294
didKey,
447
295
handle: opts.handle,
448
296
pdsUrl: opts.pds,
449
-
cryptoKey: keyPair.cryptoKey
450
-
})
451
-
const did = await deriveDidFromOperation(operation)
452
-
console.log(` DID: ${did}`)
453
-
console.log(` Handle: ${fullHandle}`)
454
-
console.log('')
297
+
cryptoKey,
298
+
});
299
+
const did = await deriveDidFromOperation(operation);
300
+
console.log(` DID: ${did}`);
301
+
console.log(` Handle: ${fullHandle}`);
302
+
console.log('');
455
303
456
304
// Step 3: Register with PLC directory
457
-
console.log(`Registering with ${opts.plcUrl}...`)
458
-
await registerWithPlc(opts.plcUrl, did, operation)
459
-
console.log(' Registered successfully!')
460
-
console.log('')
305
+
console.log(`Registering with ${opts.plcUrl}...`);
306
+
await registerWithPlc(opts.plcUrl, did, operation);
307
+
console.log(' Registered successfully!');
308
+
console.log('');
461
309
462
310
// Step 4: Initialize PDS
463
-
console.log(`Initializing PDS at ${opts.pds}...`)
464
-
const privateKeyHex = bytesToHex(keyPair.privateKey)
465
-
await initializePds(opts.pds, did, privateKeyHex, fullHandle)
466
-
console.log(' PDS initialized!')
467
-
console.log('')
311
+
console.log(`Initializing PDS at ${opts.pds}...`);
312
+
const privateKeyHex = bytesToHex(keyPair.privateKey);
313
+
await initializePds(opts.pds, did, privateKeyHex, fullHandle);
314
+
console.log(' PDS initialized!');
315
+
console.log('');
316
+
317
+
// Step 4b: Register handle -> DID mapping (only for subdomain handles)
318
+
if (opts.handle) {
319
+
console.log(`Registering handle mapping...`);
320
+
await registerHandle(opts.pds, opts.handle, did);
321
+
console.log(` Handle ${opts.handle} -> ${did}`);
322
+
console.log('');
323
+
}
468
324
469
325
// Step 5: Notify relay
470
-
const pdsHostname = new URL(opts.pds).host
471
-
console.log(`Notifying relay at ${opts.relayUrl}...`)
472
-
const relayOk = await notifyRelay(opts.relayUrl, pdsHostname)
326
+
const pdsHostname = new URL(opts.pds).host;
327
+
console.log(`Notifying relay at ${opts.relayUrl}...`);
328
+
const relayOk = await notifyRelay(opts.relayUrl, pdsHostname);
473
329
if (relayOk) {
474
-
console.log(' Relay notified!')
330
+
console.log(' Relay notified!');
475
331
}
476
-
console.log('')
332
+
console.log('');
477
333
478
334
// Step 6: Save credentials
479
335
const credentials = {
···
482
338
privateKeyHex: bytesToHex(keyPair.privateKey),
483
339
didKey,
484
340
pdsUrl: opts.pds,
485
-
createdAt: new Date().toISOString()
486
-
}
341
+
createdAt: new Date().toISOString(),
342
+
};
487
343
488
-
const credentialsFile = `./credentials-${opts.handle}.json`
489
-
saveCredentials(credentialsFile, credentials)
344
+
const credentialsFile = `./credentials-${opts.handle || new URL(opts.pds).host}.json`;
345
+
saveCredentials(credentialsFile, credentials);
490
346
491
347
// Final output
492
-
console.log('Setup Complete!')
493
-
console.log('===============')
494
-
console.log(`Handle: ${fullHandle}`)
495
-
console.log(`DID: ${did}`)
496
-
console.log(`PDS: ${opts.pds}`)
497
-
console.log('')
498
-
console.log(`Credentials saved to: ${credentialsFile}`)
499
-
console.log('Keep this file safe - it contains your private key!')
348
+
console.log('Setup Complete!');
349
+
console.log('===============');
350
+
console.log(`Handle: ${fullHandle}`);
351
+
console.log(`DID: ${did}`);
352
+
console.log(`PDS: ${opts.pds}`);
353
+
console.log('');
354
+
console.log(`Credentials saved to: ${credentialsFile}`);
355
+
console.log('Keep this file safe - it contains your private key!');
500
356
}
501
357
502
-
main().catch(err => {
503
-
console.error('Error:', err.message)
504
-
process.exit(1)
505
-
})
358
+
main().catch((err) => {
359
+
console.error('Error:', err.message);
360
+
process.exit(1);
361
+
});
+278
scripts/update-did.js
+278
scripts/update-did.js
···
1
+
#!/usr/bin/env node
2
+
3
+
/**
4
+
* Update DID handle and PDS endpoint
5
+
*
6
+
* Usage: node scripts/update-did.js --credentials <file> --new-handle <handle> --new-pds <url>
7
+
*/
8
+
9
+
import { webcrypto } from 'node:crypto';
10
+
import { readFileSync, writeFileSync } from 'node:fs';
11
+
12
+
// === ARGUMENT PARSING ===
13
+
14
+
function parseArgs() {
15
+
const args = process.argv.slice(2);
16
+
const opts = {
17
+
credentials: null,
18
+
newHandle: null,
19
+
newPds: null,
20
+
plcUrl: 'https://plc.directory',
21
+
};
22
+
23
+
for (let i = 0; i < args.length; i++) {
24
+
if (args[i] === '--credentials' && args[i + 1]) {
25
+
opts.credentials = args[++i];
26
+
} else if (args[i] === '--new-handle' && args[i + 1]) {
27
+
opts.newHandle = args[++i];
28
+
} else if (args[i] === '--new-pds' && args[i + 1]) {
29
+
opts.newPds = args[++i];
30
+
} else if (args[i] === '--plc-url' && args[i + 1]) {
31
+
opts.plcUrl = args[++i];
32
+
}
33
+
}
34
+
35
+
if (!opts.credentials || !opts.newHandle || !opts.newPds) {
36
+
console.error(
37
+
'Usage: node scripts/update-did.js --credentials <file> --new-handle <handle> --new-pds <url>',
38
+
);
39
+
process.exit(1);
40
+
}
41
+
42
+
return opts;
43
+
}
44
+
45
+
// === CRYPTO HELPERS ===
46
+
47
+
function hexToBytes(hex) {
48
+
const bytes = new Uint8Array(hex.length / 2);
49
+
for (let i = 0; i < hex.length; i += 2) {
50
+
bytes[i / 2] = parseInt(hex.substr(i, 2), 16);
51
+
}
52
+
return bytes;
53
+
}
54
+
55
+
function bytesToHex(bytes) {
56
+
return Array.from(bytes)
57
+
.map((b) => b.toString(16).padStart(2, '0'))
58
+
.join('');
59
+
}
60
+
61
+
async function importPrivateKey(privateKeyBytes) {
62
+
const pkcs8Prefix = new Uint8Array([
63
+
0x30, 0x41, 0x02, 0x01, 0x00, 0x30, 0x13, 0x06, 0x07, 0x2a, 0x86, 0x48,
64
+
0xce, 0x3d, 0x02, 0x01, 0x06, 0x08, 0x2a, 0x86, 0x48, 0xce, 0x3d, 0x03,
65
+
0x01, 0x07, 0x04, 0x27, 0x30, 0x25, 0x02, 0x01, 0x01, 0x04, 0x20,
66
+
]);
67
+
68
+
const pkcs8 = new Uint8Array(pkcs8Prefix.length + 32);
69
+
pkcs8.set(pkcs8Prefix);
70
+
pkcs8.set(privateKeyBytes, pkcs8Prefix.length);
71
+
72
+
return webcrypto.subtle.importKey(
73
+
'pkcs8',
74
+
pkcs8,
75
+
{ name: 'ECDSA', namedCurve: 'P-256' },
76
+
false,
77
+
['sign'],
78
+
);
79
+
}
80
+
81
+
// === CBOR ENCODING ===
82
+
83
+
function cborEncodeKey(key) {
84
+
const bytes = new TextEncoder().encode(key);
85
+
const parts = [];
86
+
const mt = 3 << 5;
87
+
if (bytes.length < 24) {
88
+
parts.push(mt | bytes.length);
89
+
} else if (bytes.length < 256) {
90
+
parts.push(mt | 24, bytes.length);
91
+
}
92
+
parts.push(...bytes);
93
+
return new Uint8Array(parts);
94
+
}
95
+
96
+
function compareBytes(a, b) {
97
+
const minLen = Math.min(a.length, b.length);
98
+
for (let i = 0; i < minLen; i++) {
99
+
if (a[i] !== b[i]) return a[i] - b[i];
100
+
}
101
+
return a.length - b.length;
102
+
}
103
+
104
+
function cborEncode(value) {
105
+
const parts = [];
106
+
107
+
function encode(val) {
108
+
if (val === null) {
109
+
parts.push(0xf6);
110
+
} else if (typeof val === 'string') {
111
+
const bytes = new TextEncoder().encode(val);
112
+
encodeHead(3, bytes.length);
113
+
parts.push(...bytes);
114
+
} else if (typeof val === 'number') {
115
+
if (Number.isInteger(val) && val >= 0) {
116
+
encodeHead(0, val);
117
+
}
118
+
} else if (val instanceof Uint8Array) {
119
+
encodeHead(2, val.length);
120
+
parts.push(...val);
121
+
} else if (Array.isArray(val)) {
122
+
encodeHead(4, val.length);
123
+
for (const item of val) encode(item);
124
+
} else if (typeof val === 'object') {
125
+
const keys = Object.keys(val);
126
+
const keysSorted = keys.sort((a, b) =>
127
+
compareBytes(cborEncodeKey(a), cborEncodeKey(b)),
128
+
);
129
+
encodeHead(5, keysSorted.length);
130
+
for (const key of keysSorted) {
131
+
encode(key);
132
+
encode(val[key]);
133
+
}
134
+
}
135
+
}
136
+
137
+
function encodeHead(majorType, length) {
138
+
const mt = majorType << 5;
139
+
if (length < 24) {
140
+
parts.push(mt | length);
141
+
} else if (length < 256) {
142
+
parts.push(mt | 24, length);
143
+
} else if (length < 65536) {
144
+
parts.push(mt | 25, length >> 8, length & 0xff);
145
+
}
146
+
}
147
+
148
+
encode(value);
149
+
return new Uint8Array(parts);
150
+
}
151
+
152
+
// === SIGNING ===
153
+
154
+
const P256_N = BigInt(
155
+
'0xFFFFFFFF00000000FFFFFFFFFFFFFFFFBCE6FAADA7179E84F3B9CAC2FC632551',
156
+
);
157
+
158
+
function ensureLowS(sig) {
159
+
const halfN = P256_N / 2n;
160
+
const r = sig.slice(0, 32);
161
+
const s = sig.slice(32, 64);
162
+
let sInt = BigInt(`0x${bytesToHex(s)}`);
163
+
164
+
if (sInt > halfN) {
165
+
sInt = P256_N - sInt;
166
+
const newS = hexToBytes(sInt.toString(16).padStart(64, '0'));
167
+
const result = new Uint8Array(64);
168
+
result.set(r);
169
+
result.set(newS, 32);
170
+
return result;
171
+
}
172
+
return sig;
173
+
}
174
+
175
+
function base64UrlEncode(bytes) {
176
+
const binary = String.fromCharCode(...bytes);
177
+
return btoa(binary)
178
+
.replace(/\+/g, '-')
179
+
.replace(/\//g, '_')
180
+
.replace(/=+$/, '');
181
+
}
182
+
183
+
async function signPlcOperation(operation, privateKey) {
184
+
const { sig, ...opWithoutSig } = operation;
185
+
const encoded = cborEncode(opWithoutSig);
186
+
187
+
const signature = await webcrypto.subtle.sign(
188
+
{ name: 'ECDSA', hash: 'SHA-256' },
189
+
privateKey,
190
+
encoded,
191
+
);
192
+
193
+
const sigBytes = ensureLowS(new Uint8Array(signature));
194
+
return base64UrlEncode(sigBytes);
195
+
}
196
+
197
+
// === MAIN ===
198
+
199
+
async function main() {
200
+
const opts = parseArgs();
201
+
202
+
// Load credentials
203
+
const creds = JSON.parse(readFileSync(opts.credentials, 'utf-8'));
204
+
console.log(`Updating DID: ${creds.did}`);
205
+
console.log(` Old handle: ${creds.handle}`);
206
+
console.log(` New handle: ${opts.newHandle}`);
207
+
console.log(` New PDS: ${opts.newPds}`);
208
+
console.log('');
209
+
210
+
// Fetch current operation log
211
+
console.log('Fetching current PLC operation log...');
212
+
const logRes = await fetch(`${opts.plcUrl}/${creds.did}/log/audit`);
213
+
if (!logRes.ok) {
214
+
throw new Error(`Failed to fetch PLC log: ${logRes.status}`);
215
+
}
216
+
const log = await logRes.json();
217
+
const lastOp = log[log.length - 1];
218
+
console.log(` Found ${log.length} operations`);
219
+
console.log(` Last CID: ${lastOp.cid}`);
220
+
console.log('');
221
+
222
+
// Import private key
223
+
const privateKey = await importPrivateKey(hexToBytes(creds.privateKeyHex));
224
+
225
+
// Create new operation
226
+
const newOp = {
227
+
type: 'plc_operation',
228
+
rotationKeys: lastOp.operation.rotationKeys,
229
+
verificationMethods: lastOp.operation.verificationMethods,
230
+
alsoKnownAs: [`at://${opts.newHandle}`],
231
+
services: {
232
+
atproto_pds: {
233
+
type: 'AtprotoPersonalDataServer',
234
+
endpoint: opts.newPds,
235
+
},
236
+
},
237
+
prev: lastOp.cid,
238
+
};
239
+
240
+
// Sign the operation
241
+
console.log('Signing new operation...');
242
+
newOp.sig = await signPlcOperation(newOp, privateKey);
243
+
244
+
// Submit to PLC
245
+
console.log('Submitting to PLC directory...');
246
+
const submitRes = await fetch(`${opts.plcUrl}/${creds.did}`, {
247
+
method: 'POST',
248
+
headers: { 'Content-Type': 'application/json' },
249
+
body: JSON.stringify(newOp),
250
+
});
251
+
252
+
if (!submitRes.ok) {
253
+
const text = await submitRes.text();
254
+
throw new Error(`PLC update failed: ${submitRes.status} ${text}`);
255
+
}
256
+
257
+
console.log(' Updated successfully!');
258
+
console.log('');
259
+
260
+
// Update credentials file
261
+
creds.handle = opts.newHandle;
262
+
creds.pdsUrl = opts.newPds;
263
+
writeFileSync(opts.credentials, JSON.stringify(creds, null, 2));
264
+
console.log(`Updated credentials file: ${opts.credentials}`);
265
+
266
+
// Verify
267
+
console.log('');
268
+
console.log('Verifying...');
269
+
const verifyRes = await fetch(`${opts.plcUrl}/${creds.did}`);
270
+
const didDoc = await verifyRes.json();
271
+
console.log(` alsoKnownAs: ${didDoc.alsoKnownAs}`);
272
+
console.log(` PDS endpoint: ${didDoc.service[0].serviceEndpoint}`);
273
+
}
274
+
275
+
main().catch((err) => {
276
+
console.error('Error:', err.message);
277
+
process.exit(1);
278
+
});
+4876
-747
src/pds.js
+4876
-747
src/pds.js
···
1
-
// === CONSTANTS ===
1
+
// โโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโ
2
+
// โ โ
3
+
// โ โโโโโโโ โโโโโโโ โโโโโโโโ Personal Data Server โ
4
+
// โ โโโโโโโโโโโโโโโโโโโโโโโโ for AT Protocol โ
5
+
// โ โโโโโโโโโโโ โโโโโโโโโโโ โ
6
+
// โ โโโโโโโ โโโ โโโโโโโโโโโ โ
7
+
// โ โโโ โโโโโโโโโโโโโโโโ โ
8
+
// โ โโโ โโโโโโโ โโโโโโโโ โ
9
+
// โ โ
10
+
// โ โโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโฃ
11
+
// โ โ
12
+
// โ A single-file ATProto PDS for Cloudflare Workers + Durable Objects โ
13
+
// โ โ
14
+
// โ Features: โ
15
+
// โ โข CBOR/DAG-CBOR encoding for content-addressed data โ
16
+
// โ โข CID generation (CIDv1 with dag-cbor + sha-256) โ
17
+
// โ โข Merkle Search Tree (MST) for repository structure โ
18
+
// โ โข P-256 signing with low-S normalization โ
19
+
// โ โข JWT authentication (access, refresh, service tokens) โ
20
+
// โ โข OAuth 2.0 with DPoP, PKCE, and token management โ
21
+
// โ โข CAR file building for repo sync โ
22
+
// โ โข R2 blob storage with MIME detection โ
23
+
// โ โข SQLite persistence via Durable Objects โ
24
+
// โ โ
25
+
// โ @see https://atproto.com โ
26
+
// โ โ
27
+
// โโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโ
28
+
29
+
// โโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโ
30
+
// โ TYPES & CONSTANTS โ
31
+
// โ Environment bindings, SQL row types, protocol constants โ
32
+
// โโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโ
33
+
34
+
// PDS version (keep in sync with package.json)
35
+
const VERSION = '0.5.0';
36
+
2
37
// CBOR primitive markers (RFC 8949)
3
-
const CBOR_FALSE = 0xf4
4
-
const CBOR_TRUE = 0xf5
5
-
const CBOR_NULL = 0xf6
38
+
const CBOR_FALSE = 0xf4;
39
+
const CBOR_TRUE = 0xf5;
40
+
const CBOR_NULL = 0xf6;
6
41
7
42
// DAG-CBOR CID link tag
8
-
const CBOR_TAG_CID = 42
43
+
const CBOR_TAG_CID = 42;
44
+
45
+
// CID codec constants
46
+
const CODEC_DAG_CBOR = 0x71;
47
+
const CODEC_RAW = 0x55;
48
+
49
+
// TID generation constants
50
+
const TID_CHARS = '234567abcdefghijklmnopqrstuvwxyz';
51
+
let lastTimestamp = 0;
52
+
const clockId = Math.floor(Math.random() * 1024);
53
+
54
+
// P-256 curve order N (for low-S signature normalization)
55
+
const P256_N = BigInt(
56
+
'0xFFFFFFFF00000000FFFFFFFFFFFFFFFFBCE6FAADA7179E84F3B9CAC2FC632551',
57
+
);
58
+
const P256_N_DIV_2 = P256_N / 2n;
59
+
60
+
// Crawler notification throttle
61
+
const CRAWL_NOTIFY_THRESHOLD = 20 * 60 * 1000; // 20 minutes (matches official PDS)
62
+
let lastCrawlNotify = 0;
63
+
64
+
// Default Bluesky AppView URL
65
+
const BSKY_APPVIEW_URL = 'https://api.bsky.app';
66
+
67
+
/**
68
+
* Cloudflare Workers environment bindings
69
+
* @typedef {Object} Env
70
+
* @property {string} JWT_SECRET - Secret for signing/verifying session JWTs
71
+
* @property {string} [RELAY_HOST] - Relay host to notify of repo updates (e.g., bsky.network)
72
+
* @property {string} [APPVIEW_URL] - AppView URL for proxying app.bsky.* requests
73
+
* @property {string} [APPVIEW_DID] - AppView DID for service auth
74
+
* @property {string} [PDS_PASSWORD] - Password for createSession authentication
75
+
* @property {DurableObjectNamespace} PDS - Durable Object namespace for PDS instances
76
+
* @property {R2Bucket} [BLOB_BUCKET] - R2 bucket for blob storage (legacy name)
77
+
* @property {R2Bucket} [BLOBS] - R2 bucket for blob storage
78
+
*/
79
+
80
+
/**
81
+
* Row from the `blocks` table - stores raw CBOR-encoded data blocks
82
+
* @typedef {Object} BlockRow
83
+
* @property {string} cid - Content ID (CIDv1 base32lower)
84
+
* @property {ArrayBuffer} data - Raw block data (CBOR-encoded)
85
+
*/
86
+
87
+
/**
88
+
* Row from the `records` table - indexes AT Protocol records
89
+
* @typedef {Object} RecordRow
90
+
* @property {string} uri - AT URI (at://did/collection/rkey)
91
+
* @property {string} cid - Content ID of the record block
92
+
* @property {string} collection - Collection NSID (e.g., app.bsky.feed.post)
93
+
* @property {string} rkey - Record key within collection
94
+
* @property {ArrayBuffer} value - CBOR-encoded record value
95
+
*/
96
+
97
+
/**
98
+
* Row from the `commits` table - tracks repo commit history
99
+
* @typedef {Object} CommitRow
100
+
* @property {string} cid - Content ID of the signed commit block
101
+
* @property {string} rev - Revision TID for ordering
102
+
* @property {string|null} prev - Previous commit CID (null for first commit)
103
+
*/
104
+
105
+
/**
106
+
* Row from the `seq_events` table - stores firehose events for subscribeRepos
107
+
* @typedef {Object} SeqEventRow
108
+
* @property {number} seq - Sequence number for cursor-based pagination
109
+
* @property {string} did - DID of the repo that changed
110
+
* @property {string} commit_cid - CID of the commit
111
+
* @property {ArrayBuffer|Uint8Array} evt - CBOR-encoded event with ops, blocks, rev, time
112
+
*/
113
+
114
+
/**
115
+
* Row from the `blobs` table - tracks uploaded blob metadata
116
+
* @typedef {Object} BlobRow
117
+
* @property {string} cid - Content ID of the blob (raw codec)
118
+
* @property {string} mime_type - MIME type (sniffed or from Content-Type header)
119
+
* @property {number} size - Size in bytes
120
+
* @property {string} created_at - ISO timestamp of upload
121
+
*/
122
+
123
+
/**
124
+
* Decoded JWT payload for session tokens
125
+
* @typedef {Object} JwtPayload
126
+
* @property {string} [scope] - Token scope (e.g., "com.atproto.access")
127
+
* @property {string} sub - Subject DID (the authenticated user)
128
+
* @property {string} [aud] - Audience (for refresh tokens, should match sub)
129
+
* @property {number} [iat] - Issued-at timestamp (Unix seconds)
130
+
* @property {number} [exp] - Expiration timestamp (Unix seconds)
131
+
* @property {string} [jti] - Unique token identifier
132
+
*/
133
+
134
+
/**
135
+
* OAuth client metadata from client_id URL
136
+
* @typedef {Object} ClientMetadata
137
+
* @property {string} client_id - The client identifier (must match the URL used to fetch metadata)
138
+
* @property {string} [client_name] - Human-readable client name
139
+
* @property {string[]} redirect_uris - Allowed redirect URIs
140
+
* @property {string[]} grant_types - Supported grant types
141
+
* @property {string[]} response_types - Supported response types
142
+
* @property {string} [token_endpoint_auth_method] - Token endpoint auth method
143
+
* @property {boolean} [dpop_bound_access_tokens] - Whether client requires DPoP-bound tokens
144
+
* @property {string} [scope] - Default scope
145
+
*/
146
+
147
+
/**
148
+
* Parsed and validated DPoP proof
149
+
* @typedef {Object} DpopProofResult
150
+
* @property {string} jkt - The JWK thumbprint of the DPoP key
151
+
* @property {string} jti - The unique identifier from the DPoP proof
152
+
* @property {number} iat - The issued-at timestamp from the DPoP proof
153
+
* @property {{ kty: string, crv: string, x: string, y: string }} jwk - The public key from the proof
154
+
*/
155
+
156
+
/**
157
+
* Parameters for creating a DPoP-bound access token
158
+
* @typedef {Object} AccessTokenParams
159
+
* @property {string} issuer - The PDS issuer URL
160
+
* @property {string} subject - The DID of the authenticated user
161
+
* @property {string} clientId - The OAuth client_id
162
+
* @property {string} scope - The granted scope
163
+
* @property {string} tokenId - Unique token identifier (jti)
164
+
* @property {string} dpopJkt - The DPoP key thumbprint for token binding
165
+
* @property {number} expiresIn - Token lifetime in seconds
166
+
*/
167
+
168
+
// โโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโ
169
+
// โ UTILITIES โ
170
+
// โ Error responses, byte conversion, base encoding โ
171
+
// โโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโ
172
+
173
+
/**
174
+
* @param {string} error - Error code
175
+
* @param {string} message - Error message
176
+
* @param {number} status - HTTP status code
177
+
* @returns {Response}
178
+
*/
179
+
function errorResponse(error, message, status) {
180
+
return Response.json({ error, message }, { status });
181
+
}
182
+
183
+
/**
184
+
* Parse atproto-proxy header to get service DID and service ID
185
+
* Format: "did:web:api.bsky.app#bsky_appview"
186
+
* @param {string} header
187
+
* @returns {{ did: string, serviceId: string } | null}
188
+
*/
189
+
export function parseAtprotoProxyHeader(header) {
190
+
if (!header) return null;
191
+
const hashIndex = header.indexOf('#');
192
+
if (hashIndex === -1 || hashIndex === 0 || hashIndex === header.length - 1) {
193
+
return null;
194
+
}
195
+
return {
196
+
did: header.slice(0, hashIndex),
197
+
serviceId: header.slice(hashIndex + 1),
198
+
};
199
+
}
200
+
201
+
/**
202
+
* Get URL for a known service DID
203
+
* @param {string} did - Service DID (e.g., "did:web:api.bsky.app")
204
+
* @param {string} serviceId - Service ID (e.g., "bsky_appview")
205
+
* @returns {string | null}
206
+
*/
207
+
export function getKnownServiceUrl(did, serviceId) {
208
+
// Known Bluesky services
209
+
if (did === 'did:web:api.bsky.app' && serviceId === 'bsky_appview') {
210
+
return BSKY_APPVIEW_URL;
211
+
}
212
+
// Add more known services as needed
213
+
return null;
214
+
}
215
+
216
+
/**
217
+
* Proxy a request to a service
218
+
* @param {Request} request - Original request
219
+
* @param {string} serviceUrl - Target service URL (e.g., "https://api.bsky.app")
220
+
* @param {string} [authHeader] - Optional Authorization header
221
+
* @returns {Promise<Response>}
222
+
*/
223
+
async function proxyToService(request, serviceUrl, authHeader) {
224
+
const url = new URL(request.url);
225
+
const targetUrl = new URL(url.pathname + url.search, serviceUrl);
226
+
227
+
const headers = new Headers();
228
+
if (authHeader) {
229
+
headers.set('Authorization', authHeader);
230
+
}
231
+
headers.set(
232
+
'Content-Type',
233
+
request.headers.get('Content-Type') || 'application/json',
234
+
);
235
+
const acceptHeader = request.headers.get('Accept');
236
+
if (acceptHeader) {
237
+
headers.set('Accept', acceptHeader);
238
+
}
239
+
const acceptLangHeader = request.headers.get('Accept-Language');
240
+
if (acceptLangHeader) {
241
+
headers.set('Accept-Language', acceptLangHeader);
242
+
}
243
+
// Forward atproto-specific headers
244
+
const labelersHeader = request.headers.get('atproto-accept-labelers');
245
+
if (labelersHeader) {
246
+
headers.set('atproto-accept-labelers', labelersHeader);
247
+
}
248
+
const topicsHeader = request.headers.get('x-bsky-topics');
249
+
if (topicsHeader) {
250
+
headers.set('x-bsky-topics', topicsHeader);
251
+
}
252
+
253
+
try {
254
+
const response = await fetch(targetUrl.toString(), {
255
+
method: request.method,
256
+
headers,
257
+
body:
258
+
request.method !== 'GET' && request.method !== 'HEAD'
259
+
? request.body
260
+
: undefined,
261
+
});
262
+
const responseHeaders = new Headers(response.headers);
263
+
responseHeaders.set('Access-Control-Allow-Origin', '*');
264
+
return new Response(response.body, {
265
+
status: response.status,
266
+
statusText: response.statusText,
267
+
headers: responseHeaders,
268
+
});
269
+
} catch (err) {
270
+
const message = err instanceof Error ? err.message : String(err);
271
+
return errorResponse(
272
+
'UpstreamFailure',
273
+
`Failed to reach service: ${message}`,
274
+
502,
275
+
);
276
+
}
277
+
}
278
+
279
+
/**
280
+
* Get the default PDS Durable Object stub.
281
+
* @param {Env} env - Environment bindings
282
+
* @returns {{ fetch: (req: Request) => Promise<Response> }} Default PDS stub
283
+
*/
284
+
function getDefaultPds(env) {
285
+
const id = env.PDS.idFromName('default');
286
+
return env.PDS.get(id);
287
+
}
288
+
289
+
/**
290
+
* Parse request body supporting both JSON and form-encoded formats.
291
+
* @param {Request} request - The incoming request
292
+
* @returns {Promise<Record<string, string>>} Parsed body data
293
+
* @throws {Error} If JSON parsing fails
294
+
*/
295
+
async function parseRequestBody(request) {
296
+
const contentType = request.headers.get('content-type') || '';
297
+
const body = await request.text();
298
+
if (contentType.includes('application/json')) {
299
+
return JSON.parse(body);
300
+
}
301
+
const params = new URLSearchParams(body);
302
+
return Object.fromEntries(params.entries());
303
+
}
304
+
305
+
/**
306
+
* Validate that required parameters are present in data object.
307
+
* @param {Record<string, unknown>} data - Data object to validate
308
+
* @param {string[]} required - List of required parameter names
309
+
* @returns {{ valid: true } | { valid: false, missing: string[] }} Validation result
310
+
*/
311
+
function validateRequiredParams(data, required) {
312
+
const missing = required.filter((key) => !data[key]);
313
+
if (missing.length > 0) {
314
+
return { valid: false, missing };
315
+
}
316
+
return { valid: true };
317
+
}
318
+
319
+
/**
320
+
* Convert bytes to hexadecimal string
321
+
* @param {Uint8Array} bytes - Bytes to convert
322
+
* @returns {string} Hex string
323
+
*/
324
+
export function bytesToHex(bytes) {
325
+
return Array.from(bytes)
326
+
.map((b) => b.toString(16).padStart(2, '0'))
327
+
.join('');
328
+
}
329
+
330
+
/**
331
+
* Convert hexadecimal string to bytes
332
+
* @param {string} hex - Hex string
333
+
* @returns {Uint8Array} Decoded bytes
334
+
*/
335
+
export function hexToBytes(hex) {
336
+
const bytes = new Uint8Array(hex.length / 2);
337
+
for (let i = 0; i < hex.length; i += 2) {
338
+
bytes[i / 2] = parseInt(hex.substr(i, 2), 16);
339
+
}
340
+
return bytes;
341
+
}
342
+
343
+
/**
344
+
* @param {Uint8Array} bytes
345
+
* @returns {bigint}
346
+
*/
347
+
function bytesToBigInt(bytes) {
348
+
let result = 0n;
349
+
for (const byte of bytes) {
350
+
result = (result << 8n) | BigInt(byte);
351
+
}
352
+
return result;
353
+
}
354
+
355
+
/**
356
+
* @param {bigint} n
357
+
* @param {number} length
358
+
* @returns {Uint8Array}
359
+
*/
360
+
function bigIntToBytes(n, length) {
361
+
const bytes = new Uint8Array(length);
362
+
for (let i = length - 1; i >= 0; i--) {
363
+
bytes[i] = Number(n & 0xffn);
364
+
n >>= 8n;
365
+
}
366
+
return bytes;
367
+
}
368
+
369
+
/**
370
+
* Encode bytes as base32lower string
371
+
* @param {Uint8Array} bytes - Bytes to encode
372
+
* @returns {string} Base32lower-encoded string
373
+
*/
374
+
export function base32Encode(bytes) {
375
+
const alphabet = 'abcdefghijklmnopqrstuvwxyz234567';
376
+
let result = '';
377
+
let bits = 0;
378
+
let value = 0;
379
+
380
+
for (const byte of bytes) {
381
+
value = (value << 8) | byte;
382
+
bits += 8;
383
+
while (bits >= 5) {
384
+
bits -= 5;
385
+
result += alphabet[(value >> bits) & 31];
386
+
}
387
+
}
388
+
389
+
if (bits > 0) {
390
+
result += alphabet[(value << (5 - bits)) & 31];
391
+
}
392
+
393
+
return result;
394
+
}
395
+
396
+
/**
397
+
* Decode base32lower string to bytes
398
+
* @param {string} str - Base32lower-encoded string
399
+
* @returns {Uint8Array} Decoded bytes
400
+
*/
401
+
export function base32Decode(str) {
402
+
const alphabet = 'abcdefghijklmnopqrstuvwxyz234567';
403
+
let bits = 0;
404
+
let value = 0;
405
+
const output = [];
406
+
407
+
for (const char of str) {
408
+
const idx = alphabet.indexOf(char);
409
+
if (idx === -1) continue;
410
+
value = (value << 5) | idx;
411
+
bits += 5;
412
+
if (bits >= 8) {
413
+
bits -= 8;
414
+
output.push((value >> bits) & 0xff);
415
+
}
416
+
}
417
+
418
+
return new Uint8Array(output);
419
+
}
420
+
421
+
/**
 * Encode bytes as a base64url string (no padding).
 * @param {Uint8Array} bytes - Bytes to encode
 * @returns {string} Base64url-encoded string
 */
export function base64UrlEncode(bytes) {
  // btoa consumes a "binary string" (one char per byte)
  const binary = Array.from(bytes, (b) => String.fromCharCode(b)).join('');
  // Swap to the URL-safe alphabet and strip padding in a single pass
  return btoa(binary).replace(/[+/=]/g, (c) =>
    c === '+' ? '-' : c === '/' ? '_' : '',
  );
}
434
+
435
+
/**
 * Decode a base64url string to bytes.
 * @param {string} str - Base64url-encoded string
 * @returns {Uint8Array} Decoded bytes
 */
export function base64UrlDecode(str) {
  // Map the URL-safe alphabet back to standard base64
  let b64 = str.replace(/[-_]/g, (c) => (c === '-' ? '+' : '/'));
  // Re-add padding so atob accepts the input
  while (b64.length % 4 !== 0) b64 += '=';
  const raw = atob(b64);
  return Uint8Array.from(raw, (c) => c.charCodeAt(0));
}
451
+
452
+
/**
453
+
* Timing-safe string comparison using constant-time comparison.
454
+
* Compares hashes of strings to prevent timing attacks.
455
+
* @param {string} a - First string to compare
456
+
* @param {string} b - Second string to compare
457
+
* @returns {Promise<boolean>} True if strings are equal
458
+
*/
459
+
export async function timingSafeEqual(a, b) {
460
+
const encoder = new TextEncoder();
461
+
const aBytes = encoder.encode(a);
462
+
const bBytes = encoder.encode(b);
463
+
464
+
// Hash both to ensure constant-time comparison regardless of length
465
+
const [aHash, bHash] = await Promise.all([
466
+
crypto.subtle.digest('SHA-256', aBytes),
467
+
crypto.subtle.digest('SHA-256', bBytes),
468
+
]);
469
+
470
+
const aArr = new Uint8Array(aHash);
471
+
const bArr = new Uint8Array(bHash);
472
+
473
+
// Constant-time comparison
474
+
let result = 0;
475
+
for (let i = 0; i < aArr.length; i++) {
476
+
result |= aArr[i] ^ bArr[i];
477
+
}
478
+
return result === 0;
479
+
}
480
+
481
+
/**
 * Compute the SHA-256 JWK thumbprint of an EC public key per RFC 7638.
 * @param {{ kty: string, crv: string, x: string, y: string }} jwk - The EC public key in JWK format
 * @returns {Promise<string>} The base64url-encoded thumbprint
 */
export async function computeJwkThumbprint(jwk) {
  const { crv, kty, x, y } = jwk;
  // RFC 7638: only the required members, serialized with keys in
  // lexicographic order and no whitespace.
  const canonical = JSON.stringify({ crv, kty, x, y });
  const digest = await crypto.subtle.digest(
    'SHA-256',
    new TextEncoder().encode(canonical),
  );
  return base64UrlEncode(new Uint8Array(digest));
}
502
+
503
+
/**
 * Check if a client_id represents a loopback client (localhost development).
 * Loopback clients are allowed without pre-registration per AT Protocol OAuth spec.
 * @param {string} clientId - The client_id to check
 * @returns {boolean} True if the client_id is a loopback address
 */
export function isLoopbackClient(clientId) {
  let host;
  try {
    host = new URL(clientId).hostname.toLowerCase();
  } catch {
    return false; // not a parseable URL, so not a loopback client
  }
  // Note: WHATWG URL keeps the brackets on IPv6 hostnames
  return ['localhost', '127.0.0.1', '[::1]'].includes(host);
}
518
+
519
+
/**
 * Generate permissive client metadata for a loopback client.
 * The client_id doubles as the sole redirect URI; no client auth is required
 * but access tokens are still DPoP-bound.
 * @param {string} clientId - The loopback client_id
 * @returns {ClientMetadata} Generated client metadata
 */
export function getLoopbackClientMetadata(clientId) {
  /** @type {ClientMetadata} */
  const metadata = {
    client_id: clientId,
    client_name: 'Loopback Client',
    redirect_uris: [clientId],
    grant_types: ['authorization_code', 'refresh_token'],
    response_types: ['code'],
    token_endpoint_auth_method: 'none',
    dpop_bound_access_tokens: true,
    scope: 'atproto',
  };
  return metadata;
}
536
+
537
+
/**
 * Validate client metadata against AT Protocol OAuth requirements.
 * @param {ClientMetadata} metadata - The client metadata to validate
 * @param {string} expectedClientId - The expected client_id (the URL used to fetch metadata)
 * @throws {Error} If validation fails
 */
export function validateClientMetadata(metadata, expectedClientId) {
  const {
    client_id: clientId,
    redirect_uris: redirectUris,
    grant_types: grantTypes,
  } = metadata;

  if (!clientId) throw new Error('client_id is required');
  // The document must self-identify with the exact URL it was fetched from
  if (clientId !== expectedClientId) throw new Error('client_id mismatch');

  const hasRedirects = Array.isArray(redirectUris) && redirectUris.length > 0;
  if (!hasRedirects) throw new Error('redirect_uris is required');

  if (!grantTypes?.includes('authorization_code')) {
    throw new Error('grant_types must include authorization_code');
  }
}
557
+
558
+
/** @type {Map<string, { metadata: ClientMetadata, expiresAt: number }>} */
559
+
const clientMetadataCache = new Map();
560
+
561
+
/**
 * Fetch and validate client metadata from a client_id URL.
 * Results are cached for 10 minutes. Loopback clients short-circuit to
 * synthetic metadata and never hit the network.
 * @param {string} clientId - The client_id (URL to fetch metadata from)
 * @returns {Promise<ClientMetadata>} The validated client metadata
 * @throws {Error} If fetching or validation fails
 */
async function getClientMetadata(clientId) {
  const CACHE_TTL_MS = 600000; // 10 minutes

  const hit = clientMetadataCache.get(clientId);
  if (hit && Date.now() < hit.expiresAt) return hit.metadata;

  /** @type {ClientMetadata} */
  let metadata;
  if (isLoopbackClient(clientId)) {
    metadata = getLoopbackClientMetadata(clientId);
  } else {
    const response = await fetch(clientId, {
      headers: { Accept: 'application/json' },
    });
    if (!response.ok) {
      throw new Error(`Failed to fetch client metadata: ${response.status}`);
    }
    metadata = await response.json();
    validateClientMetadata(metadata, clientId);
  }

  clientMetadataCache.set(clientId, {
    metadata,
    expiresAt: Date.now() + CACHE_TTL_MS,
  });
  return metadata;
}
595
+
596
+
/**
 * Parse and validate a DPoP proof JWT (RFC 9449).
 * Verifies the ES256 signature against the embedded JWK, checks the claims
 * (htm, htu, iat, jti), and optionally validates key binding (expectedJkt)
 * and the access-token hash (ath).
 * @param {string} proof - The DPoP proof JWT
 * @param {string} method - The expected HTTP method (htm claim)
 * @param {string} url - The expected request URL (htu claim)
 * @param {string|null} [expectedJkt=null] - If provided, verify the key matches this thumbprint
 * @param {string|null} [accessToken=null] - If provided, verify the ath claim matches this token's hash
 * @returns {Promise<DpopProofResult>} The parsed proof with jkt, jti, and jwk
 * @throws {Error} If validation fails
 */
async function parseDpopProof(
  proof,
  method,
  url,
  expectedJkt = null,
  accessToken = null,
) {
  const segments = proof.split('.');
  if (segments.length !== 3) throw new Error('Invalid DPoP proof format');
  const [headerB64, payloadB64, signatureB64] = segments;

  /** @param {string} b64 */
  const decodeJson = (b64) =>
    JSON.parse(new TextDecoder().decode(base64UrlDecode(b64)));
  const header = decodeJson(headerB64);
  const payload = decodeJson(payloadB64);

  // Header checks: correct token type, algorithm, and an embedded EC key
  if (header.typ !== 'dpop+jwt')
    throw new Error('DPoP proof must have typ dpop+jwt');
  if (header.alg !== 'ES256') throw new Error('DPoP proof must use ES256');
  if (!header.jwk || header.jwk.kty !== 'EC')
    throw new Error('DPoP proof must contain EC key');

  // Verify the signature with the key the proof itself carries
  const publicKey = await crypto.subtle.importKey(
    'jwk',
    header.jwk,
    { name: 'ECDSA', namedCurve: 'P-256' },
    false,
    ['verify'],
  );
  const valid = await crypto.subtle.verify(
    { name: 'ECDSA', hash: 'SHA-256' },
    publicKey,
    /** @type {BufferSource} */ (base64UrlDecode(signatureB64)),
    /** @type {BufferSource} */ (
      new TextEncoder().encode(`${headerB64}.${payloadB64}`)
    ),
  );
  if (!valid) throw new Error('DPoP proof signature invalid');

  // Claim checks
  if (payload.htm !== method) throw new Error('DPoP htm mismatch');

  // Compare htu leniently: drop trailing slash and query, case-insensitive
  /** @param {string} u */
  const normalizeUrl = (u) => u.replace(/\/$/, '').split('?')[0].toLowerCase();
  if (normalizeUrl(payload.htu) !== normalizeUrl(url))
    throw new Error('DPoP htu mismatch');

  // Accept up to 60s clock skew forward, 300s backward
  const now = Math.floor(Date.now() / 1000);
  const iatOk =
    payload.iat && payload.iat <= now + 60 && payload.iat >= now - 300;
  if (!iatOk) throw new Error('DPoP proof expired or invalid iat');

  if (!payload.jti) throw new Error('DPoP proof missing jti');

  const jkt = await computeJwkThumbprint(header.jwk);
  if (expectedJkt && jkt !== expectedJkt) throw new Error('DPoP key mismatch');

  // ath = base64url(SHA-256(access token)), required when a token is presented
  if (accessToken) {
    const digest = await crypto.subtle.digest(
      'SHA-256',
      new TextEncoder().encode(accessToken),
    );
    const expectedAth = base64UrlEncode(new Uint8Array(digest));
    if (payload.ath !== expectedAth) throw new Error('DPoP ath mismatch');
  }

  return { jkt, jti: payload.jti, iat: payload.iat, jwk: header.jwk };
}
680
+
681
+
/**
 * Encode a non-negative integer as an unsigned LEB128 varint
 * (the multiformats "unsigned-varint" used by CAR/CID framing).
 *
 * Fixed: the previous implementation used 32-bit bitwise ops (`& 0x7f`,
 * `>>>= 7`), which silently produced wrong encodings for values >= 2^32
 * and garbage for negative or fractional inputs. Division/modulo keep the
 * encoding exact for all safe integers, and invalid inputs now throw.
 * @param {number} n - Non-negative safe integer
 * @returns {Uint8Array} Varint-encoded bytes
 * @throws {Error} If n is not a non-negative safe integer
 */
export function varint(n) {
  if (!Number.isInteger(n) || n < 0 || n > Number.MAX_SAFE_INTEGER) {
    throw new Error(`varint requires a non-negative safe integer, got ${n}`);
  }
  const bytes = [];
  let rest = n;
  while (rest >= 0x80) {
    // low 7 bits with the continuation bit set
    bytes.push((rest % 0x80) | 0x80);
    rest = Math.floor(rest / 0x80);
  }
  bytes.push(rest);
  return new Uint8Array(bytes);
}
9
695
10
696
// === CID WRAPPER ===
11
697
// Explicit CID type for DAG-CBOR encoding (avoids fragile heuristic detection)
12
698
13
699
class CID {
  /**
   * Wrap raw CID bytes in an explicit type so the DAG-CBOR encoder can
   * distinguish links (tag 42) from plain byte strings.
   * @param {Uint8Array} bytes - Raw CID bytes (version + codec + multihash)
   * @throws {Error} If bytes is not a Uint8Array
   */
  constructor(bytes) {
    if (!(bytes instanceof Uint8Array)) {
      throw new Error('CID must be constructed with Uint8Array');
    }
    this.bytes = bytes;
  }
}
21
708
22
-
// === CBOR ENCODING ===
23
-
// Minimal deterministic CBOR (RFC 8949) - sorted keys, minimal integers
709
+
// โโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโ
710
+
// โ CBOR ENCODING โ
711
+
// โ RFC 8949 CBOR and DAG-CBOR for content-addressed data โ
712
+
// โโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโ
24
713
25
714
/**
26
715
* Encode CBOR type header (major type + length)
···
29
718
* @param {number} length - Value or length to encode
30
719
*/
31
720
function encodeHead(parts, majorType, length) {
  // Shortest-form (deterministic) CBOR argument encoding.
  const mt = majorType << 5;
  if (length < 24) {
    // argument fits directly in the low 5 bits
    parts.push(mt | length);
  } else if (length < 256) {
    // additional info 24: one following byte
    parts.push(mt | 24, length);
  } else if (length < 65536) {
    // additional info 25: two following bytes, big-endian
    parts.push(mt | 25, length >> 8, length & 0xff);
  } else if (length < 4294967296) {
    // additional info 26: four following bytes, big-endian.
    // Use Math.floor instead of bitshift to avoid 32-bit signed integer overflow
    parts.push(
      mt | 26,
      Math.floor(length / 0x1000000) & 0xff,
      Math.floor(length / 0x10000) & 0xff,
      Math.floor(length / 0x100) & 0xff,
      length & 0xff,
    );
  } else {
    // Fixed: values >= 2^32 previously fell through every branch and were
    // silently dropped, producing corrupt CBOR. Fail loudly instead.
    throw new Error(`CBOR length too large: ${length}`);
  }
}
48
739
···
52
743
* @returns {Uint8Array} CBOR-encoded bytes
53
744
*/
54
745
export function cborEncode(value) {
  /** @type {number[]} */
  const out = [];
  const utf8 = new TextEncoder();

  /** Recursively append the CBOR encoding of `item` to `out`. @param {*} item */
  const write = (item) => {
    if (item === null) {
      out.push(CBOR_NULL);
    } else if (item === true) {
      out.push(CBOR_TRUE);
    } else if (item === false) {
      out.push(CBOR_FALSE);
    } else if (typeof item === 'number') {
      // major type 0 = unsigned int, major type 1 = negative int
      if (item >= 0) encodeHead(out, 0, item);
      else encodeHead(out, 1, -item - 1);
    } else if (typeof item === 'string') {
      const encoded = utf8.encode(item);
      encodeHead(out, 3, encoded.length); // major type 3 = text string
      out.push(...encoded);
    } else if (item instanceof Uint8Array) {
      encodeHead(out, 2, item.length); // major type 2 = byte string
      out.push(...item);
    } else if (Array.isArray(item)) {
      encodeHead(out, 4, item.length); // major type 4 = array
      item.forEach(write);
    } else if (typeof item === 'object') {
      // Deterministic encoding: map keys sorted lexicographically
      const sortedKeys = Object.keys(item).sort();
      encodeHead(out, 5, sortedKeys.length); // major type 5 = map
      for (const k of sortedKeys) {
        write(k);
        write(item[k]);
      }
    }
  };

  write(value);
  return new Uint8Array(out);
}
99
-
// DAG-CBOR encoder that handles CIDs with tag 42
100
-
function cborEncodeDagCbor(value) {
101
-
const parts = []
793
+
/**
 * DAG-CBOR encoder: standard CBOR plus CID links as tag 42, with the
 * DAG-CBOR map key ordering (length-first, then lexicographic).
 * @param {*} value
 * @returns {Uint8Array}
 */
export function cborEncodeDagCbor(value) {
  /** @type {number[]} */
  const out = [];
  const utf8 = new TextEncoder();

  /** Recursively append the DAG-CBOR encoding of `item`. @param {*} item */
  const write = (item) => {
    if (item === null) {
      out.push(CBOR_NULL);
    } else if (item === true) {
      out.push(CBOR_TRUE);
    } else if (item === false) {
      out.push(CBOR_FALSE);
    } else if (typeof item === 'number') {
      // Only integers are representable here; non-integers are skipped
      // (matches the minimal scope of this encoder).
      if (Number.isInteger(item)) {
        if (item >= 0) encodeHead(out, 0, item);
        else encodeHead(out, 1, -item - 1);
      }
    } else if (typeof item === 'string') {
      const encoded = utf8.encode(item);
      encodeHead(out, 3, encoded.length);
      out.push(...encoded);
    } else if (item instanceof CID) {
      // CID links use tag 42 wrapping a byte string whose first byte is
      // the 0x00 "identity" multibase prefix.
      out.push(0xd8, CBOR_TAG_CID);
      encodeHead(out, 2, item.bytes.length + 1); // +1 for the 0x00 prefix
      out.push(0x00, ...item.bytes);
    } else if (item instanceof Uint8Array) {
      // Plain byte string
      encodeHead(out, 2, item.length);
      out.push(...item);
    } else if (Array.isArray(item)) {
      encodeHead(out, 4, item.length);
      item.forEach(write);
    } else if (typeof item === 'object') {
      // DAG-CBOR map key order: shorter keys first, ties broken
      // lexicographically (differs from standard CBOR). Keys with
      // undefined values are omitted.
      const keys = Object.keys(item)
        .filter((k) => item[k] !== undefined)
        .sort((a, b) => a.length - b.length || (a < b ? -1 : a > b ? 1 : 0));
      encodeHead(out, 5, keys.length);
      for (const k of keys) {
        write(k);
        write(item[k]);
      }
    }
  };

  write(value);
  return new Uint8Array(out);
}
155
855
156
856
/**
···
159
859
* @returns {*} Decoded value
160
860
*/
161
861
export function cborDecode(bytes) {
  let pos = 0;

  /**
   * Read the additional-information argument that follows a header byte.
   * @param {number} info
   * @returns {number}
   */
  const readLength = (info) => {
    if (info < 24) return info;
    if (info === 24) return bytes[pos++];
    if (info === 25) return (bytes[pos++] << 8) | bytes[pos++];
    if (info === 26) {
      // Use multiplication instead of bitshift to avoid 32-bit signed integer overflow
      return (
        bytes[pos++] * 0x1000000 +
        bytes[pos++] * 0x10000 +
        bytes[pos++] * 0x100 +
        bytes[pos++]
      );
    }
    return info; // info 27-31: pass through unchanged
  };

  /** Decode one data item starting at `pos`. @returns {*} */
  const readItem = () => {
    const head = bytes[pos++];
    const major = head >> 5;
    const info = head & 0x1f;
    const length = readLength(info);

    switch (major) {
      case 0:
        return length; // unsigned int
      case 1:
        return -1 - length; // negative int
      case 2: {
        // byte string
        const data = bytes.slice(pos, pos + length);
        pos += length;
        return data;
      }
      case 3: {
        // text string
        const text = new TextDecoder().decode(bytes.slice(pos, pos + length));
        pos += length;
        return text;
      }
      case 4: {
        // array of `length` items
        return Array.from({ length }, () => readItem());
      }
      case 5: {
        // map
        /** @type {Record<string, *>} */
        const map = {};
        for (let i = 0; i < length; i++) {
          const key = /** @type {string} */ (readItem());
          map[key] = readItem();
        }
        return map;
      }
      case 6: {
        // tag; `length` holds the tag number
        const inner = readItem();
        // CID links (tag 42): strip the 0x00 multibase prefix, return raw bytes
        return length === CBOR_TAG_CID ? inner.slice(1) : inner;
      }
      case 7: {
        // simple values
        if (info === 20) return false;
        if (info === 21) return true;
        if (info === 22) return null;
        return undefined;
      }
    }
  };

  return readItem();
}
214
940
215
-
// === CID GENERATION ===
216
-
// dag-cbor (0x71) + sha-256 (0x12) + 32 bytes
941
+
// โโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโ
942
+
// โ CONTENT IDENTIFIERS โ
943
+
// โ CIDs (content hashes) and TIDs (timestamp IDs) โ
944
+
// โโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโ
217
945
218
946
/**
219
-
* Create a CIDv1 (dag-cbor + sha-256) from raw bytes
947
+
* Create a CIDv1 with SHA-256 hash
220
948
* @param {Uint8Array} bytes - Content to hash
949
+
* @param {number} codec - Codec identifier (0x71 for dag-cbor, 0x55 for raw)
221
950
* @returns {Promise<Uint8Array>} CID bytes (36 bytes: version + codec + multihash)
222
951
*/
223
-
export async function createCid(bytes) {
224
-
const hash = await crypto.subtle.digest('SHA-256', bytes)
225
-
const hashBytes = new Uint8Array(hash)
952
+
async function createCidWithCodec(bytes, codec) {
953
+
const hash = await crypto.subtle.digest(
954
+
'SHA-256',
955
+
/** @type {BufferSource} */ (bytes),
956
+
);
957
+
const hashBytes = new Uint8Array(hash);
226
958
227
-
// CIDv1: version(1) + codec(dag-cbor=0x71) + multihash(sha256)
959
+
// CIDv1: version(1) + codec + multihash(sha256)
228
960
// Multihash: hash-type(0x12) + length(0x20=32) + digest
229
-
const cid = new Uint8Array(2 + 2 + 32)
230
-
cid[0] = 0x01 // CIDv1
231
-
cid[1] = 0x71 // dag-cbor codec
232
-
cid[2] = 0x12 // sha-256
233
-
cid[3] = 0x20 // 32 bytes
234
-
cid.set(hashBytes, 4)
961
+
const cid = new Uint8Array(2 + 2 + 32);
962
+
cid[0] = 0x01; // CIDv1
963
+
cid[1] = codec;
964
+
cid[2] = 0x12; // sha-256
965
+
cid[3] = 0x20; // 32 bytes
966
+
cid.set(hashBytes, 4);
235
967
236
-
return cid
968
+
return cid;
969
+
}
970
+
971
+
/**
972
+
* Create CID for DAG-CBOR encoded data (records, commits)
973
+
* @param {Uint8Array} bytes - DAG-CBOR encoded content
974
+
* @returns {Promise<Uint8Array>} CID bytes
975
+
*/
976
+
export async function createCid(bytes) {
977
+
return createCidWithCodec(bytes, CODEC_DAG_CBOR);
978
+
}
979
+
980
+
/**
981
+
* Create CID for raw blob data (images, videos)
982
+
* @param {Uint8Array} bytes - Raw binary content
983
+
* @returns {Promise<Uint8Array>} CID bytes
984
+
*/
985
+
export async function createBlobCid(bytes) {
986
+
return createCidWithCodec(bytes, CODEC_RAW);
237
987
}
238
988
239
989
/**
···
243
993
*/
244
994
export function cidToString(cid) {
  // 'b' is the multibase prefix for base32lower, the canonical CIDv1 form
  const encoded = base32Encode(cid);
  return `b${encoded}`;
}
248
998
249
999
/**
 * Convert a base32lower CID string to raw bytes.
 * @param {string} cidStr - CID string with 'b' multibase prefix
 * @returns {Uint8Array} CID bytes
 * @throws {Error} If the string lacks the base32lower 'b' prefix
 */
export function cidToBytes(cidStr) {
  if (!cidStr.startsWith('b')) throw new Error('expected base32lower CID');
  // Drop the multibase prefix, decode the remainder
  return base32Decode(cidStr.slice(1));
}
275
1009
276
-
// === TID GENERATION ===
277
-
// Timestamp-based IDs: base32-sort encoded microseconds + clock ID
278
-
279
-
const TID_CHARS = '234567abcdefghijklmnopqrstuvwxyz'
280
-
let lastTimestamp = 0
281
-
let clockId = Math.floor(Math.random() * 1024)
282
-
283
1010
/**
 * Generate a timestamp-based ID (TID) for record keys.
 * Monotonic within a process, sortable by time.
 * @returns {string} 13-character base32-sort encoded TID
 */
export function createTid() {
  const micros = Date.now() * 1000; // microsecond precision

  // Clamp to strictly-increasing so calls within the same millisecond
  // still produce sortable, unique values
  const timestamp = micros > lastTimestamp ? micros : lastTimestamp + 1;
  lastTimestamp = timestamp;

  // 13 chars total: 11 for the timestamp (most-significant first), 2 for clock ID
  const chars = new Array(13);
  let ts = timestamp;
  for (let i = 10; i >= 0; i--) {
    chars[i] = TID_CHARS[ts & 31];
    ts = Math.floor(ts / 32);
  }
  chars[11] = TID_CHARS[(clockId >> 5) & 31];
  chars[12] = TID_CHARS[clockId & 31];

  return chars.join('');
}
313
1040
314
-
// === P-256 SIGNING ===
315
-
// Web Crypto ECDSA with P-256 curve
1041
+
// โโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโ
1042
+
// โ CRYPTOGRAPHY โ
1043
+
// โ P-256 signing with low-S normalization, key management โ
1044
+
// โโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโ
1045
+
1046
+
/**
1047
+
* @param {BufferSource} data
1048
+
* @returns {Promise<Uint8Array>}
1049
+
*/
1050
+
async function sha256(data) {
1051
+
const hash = await crypto.subtle.digest('SHA-256', data);
1052
+
return new Uint8Array(hash);
1053
+
}
316
1054
317
1055
/**
318
1056
* Import a raw P-256 private key for signing
···
321
1059
*/
322
1060
export async function importPrivateKey(privateKeyBytes) {
323
1061
// Validate private key length (P-256 requires exactly 32 bytes)
324
-
if (!(privateKeyBytes instanceof Uint8Array) || privateKeyBytes.length !== 32) {
325
-
throw new Error(`Invalid private key: expected 32 bytes, got ${privateKeyBytes?.length ?? 'non-Uint8Array'}`)
1062
+
if (
1063
+
!(privateKeyBytes instanceof Uint8Array) ||
1064
+
privateKeyBytes.length !== 32
1065
+
) {
1066
+
throw new Error(
1067
+
`Invalid private key: expected 32 bytes, got ${privateKeyBytes?.length ?? 'non-Uint8Array'}`,
1068
+
);
326
1069
}
327
1070
328
1071
// PKCS#8 wrapper for raw P-256 private key
329
1072
const pkcs8Prefix = new Uint8Array([
330
1073
0x30, 0x41, 0x02, 0x01, 0x00, 0x30, 0x13, 0x06, 0x07, 0x2a, 0x86, 0x48,
331
1074
0xce, 0x3d, 0x02, 0x01, 0x06, 0x08, 0x2a, 0x86, 0x48, 0xce, 0x3d, 0x03,
332
-
0x01, 0x07, 0x04, 0x27, 0x30, 0x25, 0x02, 0x01, 0x01, 0x04, 0x20
333
-
])
1075
+
0x01, 0x07, 0x04, 0x27, 0x30, 0x25, 0x02, 0x01, 0x01, 0x04, 0x20,
1076
+
]);
334
1077
335
-
const pkcs8 = new Uint8Array(pkcs8Prefix.length + 32)
336
-
pkcs8.set(pkcs8Prefix)
337
-
pkcs8.set(privateKeyBytes, pkcs8Prefix.length)
1078
+
const pkcs8 = new Uint8Array(pkcs8Prefix.length + 32);
1079
+
pkcs8.set(pkcs8Prefix);
1080
+
pkcs8.set(privateKeyBytes, pkcs8Prefix.length);
338
1081
339
1082
return crypto.subtle.importKey(
340
1083
'pkcs8',
341
-
pkcs8,
1084
+
/** @type {BufferSource} */ (pkcs8),
342
1085
{ name: 'ECDSA', namedCurve: 'P-256' },
343
1086
false,
344
-
['sign']
345
-
)
346
-
}
347
-
348
-
// P-256 curve order N
349
-
const P256_N = BigInt('0xFFFFFFFF00000000FFFFFFFFFFFFFFFFBCE6FAADA7179E84F3B9CAC2FC632551')
350
-
const P256_N_DIV_2 = P256_N / 2n
351
-
352
-
function bytesToBigInt(bytes) {
353
-
let result = 0n
354
-
for (const byte of bytes) {
355
-
result = (result << 8n) | BigInt(byte)
356
-
}
357
-
return result
358
-
}
359
-
360
-
function bigIntToBytes(n, length) {
361
-
const bytes = new Uint8Array(length)
362
-
for (let i = length - 1; i >= 0; i--) {
363
-
bytes[i] = Number(n & 0xffn)
364
-
n >>= 8n
365
-
}
366
-
return bytes
1087
+
['sign'],
1088
+
);
367
1089
}
368
1090
369
1091
/**
···
376
1098
const signature = await crypto.subtle.sign(
377
1099
{ name: 'ECDSA', hash: 'SHA-256' },
378
1100
privateKey,
379
-
data
380
-
)
381
-
const sig = new Uint8Array(signature)
1101
+
/** @type {BufferSource} */ (data),
1102
+
);
1103
+
const sig = new Uint8Array(signature);
382
1104
383
-
const r = sig.slice(0, 32)
384
-
const s = sig.slice(32, 64)
385
-
const sBigInt = bytesToBigInt(s)
1105
+
const r = sig.slice(0, 32);
1106
+
const s = sig.slice(32, 64);
1107
+
const sBigInt = bytesToBigInt(s);
386
1108
387
1109
// Low-S normalization: Bitcoin/ATProto require S <= N/2 to prevent
388
1110
// signature malleability (two valid signatures for same message)
389
1111
if (sBigInt > P256_N_DIV_2) {
390
-
const newS = P256_N - sBigInt
391
-
const newSBytes = bigIntToBytes(newS, 32)
392
-
const normalized = new Uint8Array(64)
393
-
normalized.set(r, 0)
394
-
normalized.set(newSBytes, 32)
395
-
return normalized
1112
+
const newS = P256_N - sBigInt;
1113
+
const newSBytes = bigIntToBytes(newS, 32);
1114
+
const normalized = new Uint8Array(64);
1115
+
normalized.set(r, 0);
1116
+
normalized.set(newSBytes, 32);
1117
+
return normalized;
396
1118
}
397
1119
398
-
return sig
1120
+
return sig;
399
1121
}
400
1122
401
1123
/**
···
406
1128
const keyPair = await crypto.subtle.generateKey(
407
1129
{ name: 'ECDSA', namedCurve: 'P-256' },
408
1130
true,
409
-
['sign', 'verify']
410
-
)
1131
+
['sign', 'verify'],
1132
+
);
411
1133
412
1134
// Export private key as raw bytes
413
-
const privateJwk = await crypto.subtle.exportKey('jwk', keyPair.privateKey)
414
-
const privateBytes = base64UrlDecode(privateJwk.d)
1135
+
const privateJwk = await crypto.subtle.exportKey('jwk', keyPair.privateKey);
1136
+
const privateBytes = base64UrlDecode(/** @type {string} */ (privateJwk.d));
415
1137
416
1138
// Export public key as compressed point
417
-
const publicRaw = await crypto.subtle.exportKey('raw', keyPair.publicKey)
418
-
const publicBytes = new Uint8Array(publicRaw)
419
-
const compressed = compressPublicKey(publicBytes)
1139
+
const publicRaw = await crypto.subtle.exportKey('raw', keyPair.publicKey);
1140
+
const publicBytes = new Uint8Array(publicRaw);
1141
+
const compressed = compressPublicKey(publicBytes);
420
1142
421
-
return { privateKey: privateBytes, publicKey: compressed }
1143
+
return { privateKey: privateBytes, publicKey: compressed };
422
1144
}
423
1145
1146
+
/**
1147
+
* @param {Uint8Array} uncompressed
1148
+
* @returns {Uint8Array}
1149
+
*/
424
1150
function compressPublicKey(uncompressed) {
425
1151
// uncompressed is 65 bytes: 0x04 + x(32) + y(32)
426
1152
// compressed is 33 bytes: prefix(02 or 03) + x(32)
427
-
const x = uncompressed.slice(1, 33)
428
-
const y = uncompressed.slice(33, 65)
429
-
const prefix = (y[31] & 1) === 0 ? 0x02 : 0x03
430
-
const compressed = new Uint8Array(33)
431
-
compressed[0] = prefix
432
-
compressed.set(x, 1)
433
-
return compressed
1153
+
const x = uncompressed.slice(1, 33);
1154
+
const y = uncompressed.slice(33, 65);
1155
+
const prefix = (y[31] & 1) === 0 ? 0x02 : 0x03;
1156
+
const compressed = new Uint8Array(33);
1157
+
compressed[0] = prefix;
1158
+
compressed.set(x, 1);
1159
+
return compressed;
1160
+
}
1161
+
1162
+
// โโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโ
1163
+
// โ AUTHENTICATION โ
1164
+
// โ JWT creation/verification for sessions and service auth โ
1165
+
// โโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโ
1166
+
1167
+
/**
1168
+
* Create HMAC-SHA256 signature for JWT
1169
+
* @param {string} data - Data to sign (header.payload)
1170
+
* @param {string} secret - Secret key
1171
+
* @returns {Promise<string>} Base64url-encoded signature
1172
+
*/
1173
+
async function hmacSign(data, secret) {
1174
+
const key = await crypto.subtle.importKey(
1175
+
'raw',
1176
+
/** @type {BufferSource} */ (new TextEncoder().encode(secret)),
1177
+
{ name: 'HMAC', hash: 'SHA-256' },
1178
+
false,
1179
+
['sign'],
1180
+
);
1181
+
const sig = await crypto.subtle.sign(
1182
+
'HMAC',
1183
+
key,
1184
+
/** @type {BufferSource} */ (new TextEncoder().encode(data)),
1185
+
);
1186
+
return base64UrlEncode(new Uint8Array(sig));
1187
+
}
1188
+
1189
+
/**
 * Create an access JWT for ATProto
 * @param {string} did - User's DID (subject and audience)
 * @param {string} secret - JWT signing secret
 * @param {number} [expiresIn=7200] - Expiration in seconds (default 2 hours)
 * @returns {Promise<string>} Signed JWT
 */
export async function createAccessJwt(did, secret, expiresIn = 7200) {
  const issuedAt = Math.floor(Date.now() / 1000);
  // Helper: JSON -> UTF-8 -> base64url, used for both JWT segments.
  const encodeSegment = (obj) =>
    base64UrlEncode(new TextEncoder().encode(JSON.stringify(obj)));

  const headerB64 = encodeSegment({ typ: 'at+jwt', alg: 'HS256' });
  const payloadB64 = encodeSegment({
    scope: 'com.atproto.access',
    sub: did,
    aud: did,
    iat: issuedAt,
    exp: issuedAt + expiresIn,
  });

  const signature = await hmacSign(`${headerB64}.${payloadB64}`, secret);
  return `${headerB64}.${payloadB64}.${signature}`;
}
1217
+
1218
+
/**
1219
+
* Create a refresh JWT for ATProto
1220
+
* @param {string} did - User's DID (subject and audience)
1221
+
* @param {string} secret - JWT signing secret
1222
+
* @param {number} [expiresIn=86400] - Expiration in seconds (default 24 hours)
1223
+
* @returns {Promise<string>} Signed JWT
1224
+
*/
1225
+
export async function createRefreshJwt(did, secret, expiresIn = 86400) {
1226
+
const header = { typ: 'refresh+jwt', alg: 'HS256' };
1227
+
const now = Math.floor(Date.now() / 1000);
1228
+
// Generate random jti (token ID)
1229
+
const jtiBytes = new Uint8Array(32);
1230
+
crypto.getRandomValues(jtiBytes);
1231
+
const jti = base64UrlEncode(jtiBytes);
1232
+
1233
+
const payload = {
1234
+
scope: 'com.atproto.refresh',
1235
+
sub: did,
1236
+
aud: did,
1237
+
jti,
1238
+
iat: now,
1239
+
exp: now + expiresIn,
1240
+
};
1241
+
1242
+
const headerB64 = base64UrlEncode(
1243
+
new TextEncoder().encode(JSON.stringify(header)),
1244
+
);
1245
+
const payloadB64 = base64UrlEncode(
1246
+
new TextEncoder().encode(JSON.stringify(payload)),
1247
+
);
1248
+
const signature = await hmacSign(`${headerB64}.${payloadB64}`, secret);
1249
+
1250
+
return `${headerB64}.${payloadB64}.${signature}`;
434
1251
}
435
1252
436
-
function base64UrlDecode(str) {
437
-
const base64 = str.replace(/-/g, '+').replace(/_/g, '/')
438
-
const binary = atob(base64)
439
-
const bytes = new Uint8Array(binary.length)
440
-
for (let i = 0; i < binary.length; i++) {
441
-
bytes[i] = binary.charCodeAt(i)
1253
+
/**
 * Verify and decode a JWT (shared logic)
 * @param {string} jwt - JWT string to verify
 * @param {string} secret - JWT signing secret
 * @param {string} expectedType - Expected token type (e.g., 'at+jwt', 'refresh+jwt')
 * @returns {Promise<{header: {typ: string, alg: string}, payload: JwtPayload}>} Decoded header and payload
 * @throws {Error} If token is invalid, expired, or wrong type
 */
async function verifyJwt(jwt, secret, expectedType) {
  const parts = jwt.split('.');
  if (parts.length !== 3) {
    throw new Error('Invalid JWT format');
  }

  const [headerB64, payloadB64, signatureB64] = parts;

  // Verify signature. Compare in constant time so an attacker cannot learn
  // the expected HMAC byte-by-byte through a response-timing oracle
  // (a plain `!==` short-circuits at the first differing character).
  const expectedSig = await hmacSign(`${headerB64}.${payloadB64}`, secret);
  if (!timingSafeStringEqual(signatureB64, expectedSig)) {
    throw new Error('Invalid signature');
  }

  // Decode header and payload (safe to parse now that the MAC checked out)
  const header = JSON.parse(
    new TextDecoder().decode(base64UrlDecode(headerB64)),
  );
  const payload = JSON.parse(
    new TextDecoder().decode(base64UrlDecode(payloadB64)),
  );

  // Check token type (prevents e.g. an access token being replayed as a refresh token)
  if (header.typ !== expectedType) {
    throw new Error(`Invalid token type: expected ${expectedType}`);
  }

  // Check expiration (claims without `exp` never expire here)
  const now = Math.floor(Date.now() / 1000);
  if (payload.exp && payload.exp < now) {
    throw new Error('Token expired');
  }

  return { header, payload };
}

/**
 * Compare two strings in time independent of where they first differ.
 * A length mismatch still returns early, which is fine: the length of an
 * HMAC-SHA256 base64url signature is public knowledge.
 * @param {string} a
 * @param {string} b
 * @returns {boolean} True when the strings are identical
 */
function timingSafeStringEqual(a, b) {
  if (a.length !== b.length) return false;
  let diff = 0;
  for (let i = 0; i < a.length; i++) {
    diff |= a.charCodeAt(i) ^ b.charCodeAt(i);
  }
  return diff === 0;
}
445
1296
446
1297
/**
 * Verify and decode an access JWT
 * @param {string} jwt - JWT string to verify
 * @param {string} secret - JWT signing secret
 * @returns {Promise<JwtPayload>} Decoded payload
 * @throws {Error} If token is invalid, expired, or wrong type
 */
export async function verifyAccessJwt(jwt, secret) {
  // Delegate to the shared verifier, pinning the expected token type.
  const result = await verifyJwt(jwt, secret, 'at+jwt');
  return result.payload;
}
454
1308
455
1309
/**
 * Verify and decode a refresh JWT
 * @param {string} jwt - JWT string to verify
 * @param {string} secret - JWT signing secret
 * @returns {Promise<JwtPayload>} Decoded payload
 * @throws {Error} If token is invalid, expired, or wrong type
 */
export async function verifyRefreshJwt(jwt, secret) {
  const { payload } = await verifyJwt(jwt, secret, 'refresh+jwt');

  // Refresh tokens are self-addressed: when an audience claim is present
  // it must name the same DID as the subject.
  const { aud, sub } = payload;
  if (aud && aud !== sub) {
    throw new Error('Invalid audience');
  }

  return payload;
}
467
1326
468
-
// === MERKLE SEARCH TREE ===
469
-
// ATProto-compliant MST implementation
1327
+
/**
1328
+
* Create a service auth JWT signed with ES256 (P-256)
1329
+
* Used for proxying requests to AppView
1330
+
* @param {Object} params - JWT parameters
1331
+
* @param {string} params.iss - Issuer DID (PDS DID)
1332
+
* @param {string} params.aud - Audience DID (AppView DID)
1333
+
* @param {string|null} params.lxm - Lexicon method being called
1334
+
* @param {CryptoKey} params.signingKey - P-256 private key from importPrivateKey
1335
+
* @returns {Promise<string>} Signed JWT
1336
+
*/
1337
+
export async function createServiceJwt({ iss, aud, lxm, signingKey }) {
1338
+
const header = { typ: 'JWT', alg: 'ES256' };
1339
+
const now = Math.floor(Date.now() / 1000);
1340
+
1341
+
// Generate random jti
1342
+
const jtiBytes = new Uint8Array(16);
1343
+
crypto.getRandomValues(jtiBytes);
1344
+
const jti = bytesToHex(jtiBytes);
1345
+
1346
+
/** @type {{ iss: string, aud: string, exp: number, iat: number, jti: string, lxm?: string }} */
1347
+
const payload = {
1348
+
iss,
1349
+
aud,
1350
+
exp: now + 60, // 1 minute expiration
1351
+
iat: now,
1352
+
jti,
1353
+
};
1354
+
if (lxm) payload.lxm = lxm;
1355
+
1356
+
const headerB64 = base64UrlEncode(
1357
+
new TextEncoder().encode(JSON.stringify(header)),
1358
+
);
1359
+
const payloadB64 = base64UrlEncode(
1360
+
new TextEncoder().encode(JSON.stringify(payload)),
1361
+
);
1362
+
const toSign = new TextEncoder().encode(`${headerB64}.${payloadB64}`);
1363
+
1364
+
const sig = await sign(signingKey, toSign);
1365
+
const sigB64 = base64UrlEncode(sig);
470
1366
471
-
async function sha256(data) {
472
-
const hash = await crypto.subtle.digest('SHA-256', data)
473
-
return new Uint8Array(hash)
1367
+
return `${headerB64}.${payloadB64}.${sigB64}`;
474
1368
}
475
1369
1370
+
// โโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโ
1371
+
// โ MERKLE SEARCH TREE โ
1372
+
// โ MST for ATProto repository structure โ
1373
+
// โโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโ
1374
+
476
1375
// Cache for key depths (SHA-256 is expensive)
// NOTE(review): module-level and unbounded — entries live for the lifetime of
// the isolate and grow with the number of distinct record keys; confirm this
// is acceptable for expected repo sizes.
const keyDepthCache = new Map();
478
1377
479
1378
/**
 * Get MST tree depth for a key based on leading zeros in SHA-256 hash
 * @param {string} key - Record key ("collection/rkey")
 * @returns {Promise<number>} Probabilistic layer this key belongs to
 */
export async function getKeyDepth(key) {
  // Memoized: hashing every key on each MST rebuild would be expensive.
  const cached = keyDepthCache.get(key);
  if (cached !== undefined) return cached;

  const digest = await sha256(new TextEncoder().encode(key));

  // Count leading zero bits of the digest.
  let leadingZeros = 0;
  for (const byte of digest) {
    if (byte === 0) {
      leadingZeros += 8;
      continue;
    }
    // clz32 counts zeros over 32 bits; subtract the 24 bits above this byte.
    leadingZeros += Math.clz32(byte) - 24;
    break;
  }

  // MST depth = leading zeros / 2. This yields a probabilistic tree where
  // ~50% of keys land at depth 0, ~25% at depth 1, etc., giving O(log n)
  // lookups.
  const depth = leadingZeros >> 1;
  keyDepthCache.set(key, depth);
  return depth;
}
512
1411
513
-
/**
 * Compute common prefix length between two byte arrays
 * @param {Uint8Array} a
 * @param {Uint8Array} b
 * @returns {number} Count of leading bytes shared by both arrays
 */
function commonPrefixLen(a, b) {
  const limit = Math.min(a.length, b.length);
  let shared = 0;
  while (shared < limit && a[shared] === b[shared]) {
    shared += 1;
  }
  return shared;
}
521
1425
522
1426
/**
 * Merkle Search Tree builder over the repo's `records` table.
 * Rebuilds the whole MST from scratch and persists each node into the
 * `blocks` table, returning the root node's CID.
 */
class MST {
  /** @param {SqlStorage} sql */
  constructor(sql) {
    // Durable Object SQLite handle; provides the `records` and `blocks` tables.
    this.sql = sql;
  }

  /**
   * Recompute the MST root from all records.
   * @returns {Promise<string|null>} Root node CID, or null for an empty repo
   */
  async computeRoot() {
    // Keys must be visited in lexicographic order for a deterministic tree.
    const records = this.sql
      .exec(`
      SELECT collection, rkey, cid FROM records ORDER BY collection, rkey
    `)
      .toArray();

    if (records.length === 0) {
      return null;
    }

    // Build entries with pre-computed depths (heights)
    // In ATProto MST, "height" determines which layer a key belongs to
    // Layer 0 is at the BOTTOM, root is at the highest layer
    const entries = [];
    let maxDepth = 0;
    for (const r of records) {
      const key = `${r.collection}/${r.rkey}`;
      const depth = await getKeyDepth(key);
      maxDepth = Math.max(maxDepth, depth);
      entries.push({
        key,
        keyBytes: new TextEncoder().encode(key),
        cid: /** @type {string} */ (r.cid),
        depth,
      });
    }

    // Start building from the root (highest layer) going down to layer 0
    return this.buildTree(entries, maxDepth);
  }

  /**
   * Recursively build one MST node at `layer` from sorted entries.
   * @param {Array<{key: string, keyBytes: Uint8Array, cid: string, depth: number}>} entries - Entries in key order
   * @param {number} layer - Layer this node lives at (root = maxDepth, leaves toward 0)
   * @returns {Promise<string|null>} CID of the persisted node, or null when empty
   */
  async buildTree(entries, layer) {
    if (entries.length === 0) return null;

    // Separate entries for this layer vs lower layers (subtrees)
    // Keys with depth == layer stay at this node
    // Keys with depth < layer go into subtrees (going down toward layer 0)
    /** @type {Array<{type: 'subtree', cid: string|null} | {type: 'entry', entry: {key: string, keyBytes: Uint8Array, cid: string, depth: number}}>} */
    const thisLayer = [];
    /** @type {Array<{key: string, keyBytes: Uint8Array, cid: string, depth: number}>} */
    let leftSubtree = [];

    for (const entry of entries) {
      if (entry.depth < layer) {
        // This entry belongs to a lower layer - accumulate for subtree
        leftSubtree.push(entry);
      } else {
        // This entry belongs at current layer (depth == layer)
        // Process accumulated left subtree first
        if (leftSubtree.length > 0) {
          const leftCid = await this.buildTree(leftSubtree, layer - 1);
          thisLayer.push({ type: 'subtree', cid: leftCid });
          leftSubtree = [];
        }
        thisLayer.push({ type: 'entry', entry });
      }
    }

    // Handle remaining left subtree
    if (leftSubtree.length > 0) {
      const leftCid = await this.buildTree(leftSubtree, layer - 1);
      thisLayer.push({ type: 'subtree', cid: leftCid });
    }

    // Build node with proper ATProto format
    /** @type {{ e: Array<{p: number, k: Uint8Array, v: CID, t: CID|null}>, l?: CID|null }} */
    const node = { e: [] };
    /** @type {string|null} */
    let leftCid = null;
    let prevKeyBytes = new Uint8Array(0);

    for (let i = 0; i < thisLayer.length; i++) {
      const item = thisLayer[i];

      if (item.type === 'subtree') {
        if (node.e.length === 0) {
          // A subtree seen before any entry becomes this node's left pointer.
          leftCid = item.cid;
        } else {
          // Attach to previous entry's 't' field
          if (item.cid !== null) {
            node.e[node.e.length - 1].t = new CID(cidToBytes(item.cid));
          }
        }
      } else {
        // Entry - compute prefix compression
        const keyBytes = item.entry.keyBytes;
        const prefixLen = commonPrefixLen(prevKeyBytes, keyBytes);
        const keySuffix = keyBytes.slice(prefixLen);

        // ATProto requires t field to be present (can be null)
        const e = {
          p: prefixLen,
          k: keySuffix,
          v: new CID(cidToBytes(item.entry.cid)),
          t: null, // Will be updated if there's a subtree
        };

        node.e.push(e);
        prevKeyBytes = /** @type {Uint8Array<ArrayBuffer>} */ (keyBytes);
      }
    }

    // ATProto requires l field to be present (can be null)
    node.l = leftCid ? new CID(cidToBytes(leftCid)) : null;

    // Encode node with proper MST CBOR format
    const nodeBytes = cborEncodeDagCbor(node);
    const nodeCid = await createCid(nodeBytes);
    const cidStr = cidToString(nodeCid);

    this.sql.exec(
      `INSERT OR REPLACE INTO blocks (cid, data) VALUES (?, ?)`,
      cidStr,
      nodeBytes,
    );

    return cidStr;
  }
}
628
1557
629
-
// Special CBOR encoder for MST nodes (CIDs as raw bytes with tag 42)
630
-
function cborEncodeMstNode(node) {
631
-
const parts = []
1558
+
// โโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโ
1559
+
// โ CAR FILES โ
1560
+
// โ Content Addressable aRchive format for repo sync โ
1561
+
// โโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโ
632
1562
633
-
function encode(val) {
634
-
if (val === null || val === undefined) {
635
-
parts.push(CBOR_NULL)
636
-
} else if (typeof val === 'number') {
637
-
encodeHead(parts, 0, val) // unsigned int
638
-
} else if (val instanceof CID) {
639
-
// CID - encode with CBOR tag 42 + 0x00 prefix (DAG-CBOR CID link)
640
-
parts.push(0xd8, CBOR_TAG_CID)
641
-
encodeHead(parts, 2, val.bytes.length + 1) // +1 for 0x00 prefix
642
-
parts.push(0x00) // multibase identity prefix
643
-
parts.push(...val.bytes)
644
-
} else if (val instanceof Uint8Array) {
645
-
// Regular bytes
646
-
encodeHead(parts, 2, val.length)
647
-
parts.push(...val)
648
-
} else if (Array.isArray(val)) {
649
-
encodeHead(parts, 4, val.length)
650
-
for (const item of val) encode(item)
651
-
} else if (typeof val === 'object') {
652
-
// Sort keys for deterministic encoding (DAG-CBOR style)
653
-
// Include null values, only exclude undefined
654
-
const keys = Object.keys(val).filter(k => val[k] !== undefined)
655
-
keys.sort((a, b) => {
656
-
// DAG-CBOR: sort by length first, then lexicographically
657
-
if (a.length !== b.length) return a.length - b.length
658
-
return a < b ? -1 : a > b ? 1 : 0
659
-
})
660
-
encodeHead(parts, 5, keys.length)
661
-
for (const key of keys) {
662
-
// Encode key as text string
663
-
const keyBytes = new TextEncoder().encode(key)
664
-
encodeHead(parts, 3, keyBytes.length)
665
-
parts.push(...keyBytes)
666
-
// Encode value
667
-
encode(val[key])
668
-
}
669
-
}
670
-
}
1563
+
/**
 * Build a CAR (Content Addressable aRchive) file
 * @param {string} rootCid - Root CID string
 * @param {Array<{cid: string, data: Uint8Array}>} blocks - Blocks to include
 * @returns {Uint8Array} CAR file bytes
 */
export function buildCarFile(rootCid, blocks) {
  /** @type {Uint8Array[]} */
  const segments = [];

  // CAR v1 header: length-prefixed DAG-CBOR { version: 1, roots: [rootCid] }
  const headerBytes = cborEncodeDagCbor({
    version: 1,
    roots: [new CID(cidToBytes(rootCid))],
  });
  segments.push(varint(headerBytes.length), headerBytes);

  // Each block: varint(cidBytes.length + data.length) + cidBytes + data
  for (const { cid, data } of blocks) {
    const cidBytes = cidToBytes(cid);
    segments.push(varint(cidBytes.length + data.length), cidBytes, data);
  }

  // Flatten all segments into one contiguous buffer.
  let total = 0;
  for (const seg of segments) total += seg.length;
  const out = new Uint8Array(total);
  let pos = 0;
  for (const seg of segments) {
    out.set(seg, pos);
    pos += seg.length;
  }
  return out;
}
703
1601
1602
+
// โโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโ
1603
+
// โ BLOB HANDLING โ
1604
+
// โ MIME detection, blob reference scanning โ
1605
+
// โโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโ
1606
+
704
1607
/**
 * Sniff MIME type from file magic bytes
 * @param {Uint8Array|ArrayBuffer} bytes - File bytes (only first 12 needed)
 * @returns {string|null} Detected MIME type or null if unknown
 */
export function sniffMimeType(bytes) {
  const head = new Uint8Array(bytes.slice(0, 12));

  // True when every expected byte matches starting at `offset`.
  const magic = (offset, ...expected) =>
    expected.every((val, i) => head[offset + i] === val);

  // JPEG: FF D8 FF
  if (magic(0, 0xff, 0xd8, 0xff)) return 'image/jpeg';

  // PNG: 89 50 4E 47 0D 0A 1A 0A
  if (magic(0, 0x89, 0x50, 0x4e, 0x47, 0x0d, 0x0a, 0x1a, 0x0a)) return 'image/png';

  // GIF: "GIF8"
  if (magic(0, 0x47, 0x49, 0x46, 0x38)) return 'image/gif';

  // WebP: "RIFF" .... "WEBP"
  if (magic(0, 0x52, 0x49, 0x46, 0x46) && magic(8, 0x57, 0x45, 0x42, 0x50)) {
    return 'image/webp';
  }

  // ISOBMFF container: "ftyp" at byte 4 (MP4, AVIF, HEIC, etc.)
  if (magic(4, 0x66, 0x74, 0x79, 0x70)) {
    // Brand code at bytes 8-11 distinguishes still-image containers.
    const brand = String.fromCharCode(head[8], head[9], head[10], head[11]);
    if (brand === 'avif') return 'image/avif';
    if (brand === 'heic' || brand === 'heix' || brand === 'mif1') return 'image/heic';
    return 'video/mp4';
  }

  return null;
}
757
1678
758
-
// Key "roots"
759
-
const rootsKey = new TextEncoder().encode('roots')
760
-
encodeHead(3, rootsKey.length)
761
-
parts.push(...rootsKey)
1679
+
/**
 * Find all blob CID references in a record
 * @param {*} obj - Record value to scan
 * @param {string[]} refs - Accumulator array (internal)
 * @returns {string[]} Array of blob CID strings
 */
export function findBlobRefs(obj, refs = []) {
  // Primitives and null cannot carry blob refs.
  if (obj === null || typeof obj !== 'object') {
    return refs;
  }

  // A blob ref has the shape { $type: 'blob', ref: { $link: '<cid>' } }.
  if (obj.$type === 'blob' && obj.ref?.$link) {
    refs.push(obj.ref.$link);
  }

  // Depth-first walk over array elements / object values.
  const children = Array.isArray(obj) ? obj : Object.values(obj);
  for (const child of children) {
    findBlobRefs(child, refs);
  }

  return refs;
}
1708
+
1709
+
// โโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโ
1710
+
// โ RELAY NOTIFICATION โ
1711
+
// โ Notify relays to crawl after repo updates โ
1712
+
// โโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโ
779
1713
780
1714
/**
781
-
* Build a CAR (Content Addressable aRchive) file
782
-
* @param {string} rootCid - Root CID string
783
-
* @param {Array<{cid: string, data: Uint8Array}>} blocks - Blocks to include
784
-
* @returns {Uint8Array} CAR file bytes
1715
+
* Notify relays to come crawl us after writes (like official PDS)
1716
+
* @param {{ RELAY_HOST?: string }} env
1717
+
* @param {string} hostname
785
1718
*/
786
-
export function buildCarFile(rootCid, blocks) {
787
-
const parts = []
788
-
789
-
// Header: { version: 1, roots: [rootCid] }
790
-
// CIDs in header must be DAG-CBOR links (tag 42 + 0x00 prefix + CID bytes)
791
-
const rootCidBytes = cidToBytes(rootCid)
792
-
const header = cborEncodeCarHeader({ version: 1, roots: [rootCidBytes] })
793
-
parts.push(varint(header.length))
794
-
parts.push(header)
795
-
796
-
// Blocks: varint(len) + cid + data
797
-
for (const block of blocks) {
798
-
const cidBytes = cidToBytes(block.cid)
799
-
const blockLen = cidBytes.length + block.data.length
800
-
parts.push(varint(blockLen))
801
-
parts.push(cidBytes)
802
-
parts.push(block.data)
1719
+
async function notifyCrawlers(env, hostname) {
1720
+
const now = Date.now();
1721
+
if (now - lastCrawlNotify < CRAWL_NOTIFY_THRESHOLD) {
1722
+
return; // Throttle notifications
803
1723
}
804
1724
805
-
// Concatenate all parts
806
-
const totalLen = parts.reduce((sum, p) => sum + p.length, 0)
807
-
const car = new Uint8Array(totalLen)
808
-
let offset = 0
809
-
for (const part of parts) {
810
-
car.set(part, offset)
811
-
offset += part.length
812
-
}
1725
+
const relayHost = env.RELAY_HOST;
1726
+
if (!relayHost) return;
813
1727
814
-
return car
1728
+
lastCrawlNotify = now;
1729
+
1730
+
// Fire and forget - don't block writes on relay notification
1731
+
fetch(`${relayHost}/xrpc/com.atproto.sync.requestCrawl`, {
1732
+
method: 'POST',
1733
+
headers: { 'Content-Type': 'application/json' },
1734
+
body: JSON.stringify({ hostname }),
1735
+
}).catch(() => {
1736
+
// Silently ignore relay notification failures
1737
+
});
815
1738
}
816
1739
1740
+
// โโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโ
1741
+
// โ ROUTING โ
1742
+
// โ XRPC endpoint definitions โ
1743
+
// โโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโ
1744
+
817
1745
/**
818
1746
* Route handler function type
819
1747
* @callback RouteHandler
820
1748
* @param {PersonalDataServer} pds - PDS instance
821
1749
* @param {Request} request - HTTP request
822
1750
* @param {URL} url - Parsed URL
823
-
* @returns {Promise<Response>} HTTP response
1751
+
* @returns {Response | Promise<Response>} HTTP response
824
1752
*/
825
1753
826
1754
/**
1755
+
* Route definition for the PDS router
827
1756
* @typedef {Object} Route
828
1757
* @property {string} [method] - Required HTTP method (default: any)
829
1758
* @property {RouteHandler} handler - Handler function
···
832
1761
/** @type {Record<string, Route>} */
// Route table mapping URL pathname -> Route. A route without `method`
// accepts any HTTP method; `method: 'POST'` routes are POST-only.
// Handlers receive (pds, request, url); parameters a handler does not use
// are named with a leading underscore.
const pdsRoutes = {
  // --- Identity / management endpoints ---------------------------------
  '/.well-known/atproto-did': {
    handler: (pds, _req, _url) => pds.handleAtprotoDid(),
  },
  '/init': {
    method: 'POST',
    handler: (pds, req, _url) => pds.handleInit(req),
  },
  '/status': {
    handler: (pds, _req, _url) => pds.handleStatus(),
  },
  '/forward-event': {
    handler: (pds, req, _url) => pds.handleForwardEvent(req),
  },
  '/register-did': {
    handler: (pds, req, _url) => pds.handleRegisterDid(req),
  },
  '/get-registered-dids': {
    handler: (pds, _req, _url) => pds.handleGetRegisteredDids(),
  },
  '/register-handle': {
    method: 'POST',
    handler: (pds, req, _url) => pds.handleRegisterHandle(req),
  },
  '/resolve-handle': {
    handler: (pds, _req, url) => pds.handleResolveHandle(url),
  },
  '/repo-info': {
    handler: (pds, _req, _url) => pds.handleRepoInfo(),
  },
  // Internal helpers used by the OAuth resource-server side.
  '/oauth-public-key': {
    handler: async (pds) => Response.json(await pds.getPublicKeyJwk()),
  },
  '/check-dpop-jti': {
    method: 'POST',
    handler: async (pds, req) => {
      const { jti, iat } = await req.json();
      const fresh = pds.checkAndStoreDpopJti(jti, iat);
      return Response.json({ fresh });
    },
  },
  // --- XRPC: server & session ------------------------------------------
  '/xrpc/com.atproto.server.describeServer': {
    handler: (pds, req, _url) => pds.handleDescribeServer(req),
  },
  '/xrpc/com.atproto.server.createSession': {
    method: 'POST',
    handler: (pds, req, _url) => pds.handleCreateSession(req),
  },
  '/xrpc/com.atproto.server.getSession': {
    handler: (pds, req, _url) => pds.handleGetSession(req),
  },
  '/xrpc/com.atproto.server.refreshSession': {
    method: 'POST',
    handler: (pds, req, _url) => pds.handleRefreshSession(req),
  },
  // --- XRPC: actor preferences -----------------------------------------
  '/xrpc/app.bsky.actor.getPreferences': {
    handler: (pds, req, _url) => pds.handleGetPreferences(req),
  },
  '/xrpc/app.bsky.actor.putPreferences': {
    method: 'POST',
    handler: (pds, req, _url) => pds.handlePutPreferences(req),
  },
  // --- XRPC: repo reads & writes ---------------------------------------
  '/xrpc/com.atproto.sync.listRepos': {
    handler: (pds, _req, _url) => pds.handleListRepos(),
  },
  '/xrpc/com.atproto.repo.createRecord': {
    method: 'POST',
    handler: (pds, req, _url) => pds.handleCreateRecord(req),
  },
  '/xrpc/com.atproto.repo.deleteRecord': {
    method: 'POST',
    handler: (pds, req, _url) => pds.handleDeleteRecord(req),
  },
  '/xrpc/com.atproto.repo.putRecord': {
    method: 'POST',
    handler: (pds, req, _url) => pds.handlePutRecord(req),
  },
  '/xrpc/com.atproto.repo.applyWrites': {
    method: 'POST',
    handler: (pds, req, _url) => pds.handleApplyWrites(req),
  },
  '/xrpc/com.atproto.repo.getRecord': {
    handler: (pds, _req, url) => pds.handleGetRecord(url),
  },
  '/xrpc/com.atproto.repo.describeRepo': {
    handler: (pds, _req, _url) => pds.handleDescribeRepo(),
  },
  '/xrpc/com.atproto.repo.listRecords': {
    handler: (pds, _req, url) => pds.handleListRecords(url),
  },
  '/xrpc/com.atproto.repo.uploadBlob': {
    method: 'POST',
    handler: (pds, req, _url) => pds.handleUploadBlob(req),
  },
  // --- XRPC: sync / firehose -------------------------------------------
  '/xrpc/com.atproto.sync.getLatestCommit': {
    handler: (pds, _req, _url) => pds.handleGetLatestCommit(),
  },
  '/xrpc/com.atproto.sync.getRepoStatus': {
    handler: (pds, _req, _url) => pds.handleGetRepoStatus(),
  },
  '/xrpc/com.atproto.sync.getRepo': {
    handler: (pds, _req, _url) => pds.handleGetRepo(),
  },
  '/xrpc/com.atproto.sync.getRecord': {
    handler: (pds, _req, url) => pds.handleSyncGetRecord(url),
  },
  '/xrpc/com.atproto.sync.getBlob': {
    handler: (pds, _req, url) => pds.handleGetBlob(url),
  },
  '/xrpc/com.atproto.sync.listBlobs': {
    handler: (pds, _req, url) => pds.handleListBlobs(url),
  },
  '/xrpc/com.atproto.sync.subscribeRepos': {
    handler: (pds, req, url) => pds.handleSubscribeRepos(req, url),
  },
  // OAuth endpoints
  '/.well-known/oauth-authorization-server': {
    handler: (pds, _req, url) => pds.handleOAuthAuthServerMetadata(url),
  },
  '/.well-known/oauth-protected-resource': {
    handler: (pds, _req, url) => pds.handleOAuthProtectedResource(url),
  },
  '/oauth/jwks': {
    handler: (pds, _req, _url) => pds.handleOAuthJwks(),
  },
  '/oauth/par': {
    method: 'POST',
    handler: (pds, req, url) => pds.handleOAuthPar(req, url),
  },
  '/oauth/authorize': {
    handler: (pds, req, url) => pds.handleOAuthAuthorize(req, url),
  },
  '/oauth/token': {
    method: 'POST',
    handler: (pds, req, url) => pds.handleOAuthToken(req, url),
  },
  '/oauth/revoke': {
    method: 'POST',
    handler: (pds, req, url) => pds.handleOAuthRevoke(req, url),
  },
};
1903
+
1904
+
// โโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโ
1905
+
// โ PERSONAL DATA SERVER โ
1906
+
// โ Durable Object class implementing ATProto PDS โ
1907
+
// โโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโ
885
1908
886
1909
export class PersonalDataServer {
1910
+
/** @type {string | undefined} */
1911
+
_did;
1912
+
1913
+
/**
1914
+
* @param {DurableObjectState} state
1915
+
* @param {Env} env
1916
+
*/
887
1917
constructor(state, env) {
888
-
this.state = state
889
-
this.sql = state.storage.sql
890
-
this.env = env
1918
+
this.state = state;
1919
+
this.sql = state.storage.sql;
1920
+
this.env = env;
891
1921
892
1922
// Initialize schema
893
1923
this.sql.exec(`
···
918
1948
evt BLOB NOT NULL
919
1949
);
920
1950
1951
+
CREATE TABLE IF NOT EXISTS blobs (
1952
+
cid TEXT PRIMARY KEY,
1953
+
mime_type TEXT NOT NULL,
1954
+
size INTEGER NOT NULL,
1955
+
created_at TEXT NOT NULL
1956
+
);
1957
+
1958
+
CREATE TABLE IF NOT EXISTS record_blobs (
1959
+
blob_cid TEXT NOT NULL,
1960
+
record_uri TEXT NOT NULL,
1961
+
PRIMARY KEY (blob_cid, record_uri)
1962
+
);
1963
+
1964
+
CREATE INDEX IF NOT EXISTS idx_record_blobs_record_uri ON record_blobs(record_uri);
1965
+
921
1966
CREATE INDEX IF NOT EXISTS idx_records_collection ON records(collection, rkey);
922
-
`)
1967
+
1968
+
CREATE TABLE IF NOT EXISTS authorization_requests (
1969
+
id TEXT PRIMARY KEY,
1970
+
client_id TEXT NOT NULL,
1971
+
client_metadata TEXT NOT NULL,
1972
+
parameters TEXT NOT NULL,
1973
+
code TEXT,
1974
+
code_challenge TEXT,
1975
+
code_challenge_method TEXT,
1976
+
dpop_jkt TEXT,
1977
+
did TEXT,
1978
+
expires_at TEXT NOT NULL,
1979
+
created_at TEXT NOT NULL
1980
+
);
1981
+
1982
+
CREATE INDEX IF NOT EXISTS idx_authorization_requests_code
1983
+
ON authorization_requests(code) WHERE code IS NOT NULL;
1984
+
1985
+
CREATE TABLE IF NOT EXISTS tokens (
1986
+
id INTEGER PRIMARY KEY AUTOINCREMENT,
1987
+
token_id TEXT UNIQUE NOT NULL,
1988
+
did TEXT NOT NULL,
1989
+
client_id TEXT NOT NULL,
1990
+
scope TEXT,
1991
+
dpop_jkt TEXT,
1992
+
expires_at TEXT NOT NULL,
1993
+
refresh_token TEXT UNIQUE,
1994
+
created_at TEXT NOT NULL,
1995
+
updated_at TEXT NOT NULL
1996
+
);
1997
+
1998
+
CREATE INDEX IF NOT EXISTS idx_tokens_did ON tokens(did);
1999
+
2000
+
CREATE TABLE IF NOT EXISTS dpop_jtis (
2001
+
jti TEXT PRIMARY KEY,
2002
+
expires_at TEXT NOT NULL
2003
+
);
2004
+
2005
+
CREATE INDEX IF NOT EXISTS idx_dpop_jtis_expires ON dpop_jtis(expires_at);
2006
+
`);
923
2007
}
924
2008
2009
+
/**
2010
+
* @param {string} did
2011
+
* @param {string} privateKeyHex
2012
+
* @param {string|null} [handle]
2013
+
*/
925
2014
async initIdentity(did, privateKeyHex, handle = null) {
926
-
await this.state.storage.put('did', did)
927
-
await this.state.storage.put('privateKey', privateKeyHex)
2015
+
await this.state.storage.put('did', did);
2016
+
await this.state.storage.put('privateKey', privateKeyHex);
928
2017
if (handle) {
929
-
await this.state.storage.put('handle', handle)
2018
+
await this.state.storage.put('handle', handle);
2019
+
}
2020
+
2021
+
// Schedule blob cleanup alarm (runs daily)
2022
+
const currentAlarm = await this.state.storage.getAlarm();
2023
+
if (!currentAlarm) {
2024
+
await this.state.storage.setAlarm(Date.now() + 24 * 60 * 60 * 1000);
930
2025
}
931
2026
}
932
2027
933
2028
async getDid() {
934
2029
if (!this._did) {
935
-
this._did = await this.state.storage.get('did')
2030
+
this._did = await this.state.storage.get('did');
936
2031
}
937
-
return this._did
2032
+
return this._did;
938
2033
}
939
2034
940
2035
async getHandle() {
941
-
return this.state.storage.get('handle')
2036
+
return this.state.storage.get('handle');
942
2037
}
943
2038
944
2039
async getSigningKey() {
945
-
const hex = await this.state.storage.get('privateKey')
946
-
if (!hex) return null
947
-
return importPrivateKey(hexToBytes(hex))
2040
+
const hex = await this.state.storage.get('privateKey');
2041
+
if (!hex) return null;
2042
+
return importPrivateKey(hexToBytes(/** @type {string} */ (hex)));
948
2043
}
949
2044
950
-
// Collect MST node blocks for a given root CID
2045
+
/**
2046
+
* Collect MST node blocks for a given root CID
2047
+
* @param {string} rootCidStr
2048
+
* @returns {Array<{cid: string, data: Uint8Array}>}
2049
+
*/
951
2050
/**
 * Collect MST node blocks for a given root CID.
 * Depth-first walk from `rootCidStr` over the `blocks` table; `visited`
 * prevents re-emitting shared subtrees. CIDs with no stored block are
 * skipped silently.
 * @param {string} rootCidStr
 * @returns {Array<{cid: string, data: Uint8Array}>}
 */
collectMstBlocks(rootCidStr) {
  /** @type {Array<{cid: string, data: Uint8Array}>} */
  const blocks = [];
  const visited = new Set();

  /** @param {string} cidStr */
  const collect = (cidStr) => {
    if (visited.has(cidStr)) return;
    visited.add(cidStr);

    const rows = /** @type {BlockRow[]} */ (
      this.sql.exec(`SELECT data FROM blocks WHERE cid = ?`, cidStr).toArray()
    );
    if (rows.length === 0) return;

    const data = new Uint8Array(rows[0].data);
    blocks.push({ cid: cidStr, data }); // Keep as string, buildCarFile will convert

    // Decode and follow child CIDs (MST nodes have 'l' and 'e' with 't' subtrees)
    try {
      const node = cborDecode(data);
      if (node.l) collect(cidToString(node.l));
      if (node.e) {
        for (const entry of node.e) {
          if (entry.t) collect(cidToString(entry.t));
        }
      }
    } catch (_e) {
      // Not an MST node (e.g. a record or commit block), ignore — leaves
      // simply have no children to follow.
    }
  };

  collect(rootCidStr);
  return blocks;
}
984
2085
2086
+
/**
2087
+
* @param {string} collection
2088
+
* @param {Record<string, *>} record
2089
+
* @param {string|null} [rkey]
2090
+
* @returns {Promise<{uri: string, cid: string, commit: string}>}
2091
+
*/
985
2092
/**
 * Create (or replace) a record, rebuild the MST, sign and store a new
 * commit, sequence a firehose event, and notify subscribers.
 * @param {string} collection
 * @param {Record<string, *>} record
 * @param {string|null} [rkey] - generated via createTid() when omitted
 * @returns {Promise<{uri: string, cid: string, commit: string}>}
 * @throws {Error} 'PDS not initialized' / 'No signing key' / 'BlobNotFound: <cid>'
 */
async createRecord(collection, record, rkey = null) {
  const did = await this.getDid();
  if (!did) throw new Error('PDS not initialized');

  rkey = rkey || createTid();
  const uri = `at://${did}/${collection}/${rkey}`;

  // Encode and hash record (must use DAG-CBOR for proper key ordering)
  const recordBytes = cborEncodeDagCbor(record);
  const recordCid = await createCid(recordBytes);
  const recordCidStr = cidToString(recordCid);

  // Store block
  this.sql.exec(
    `INSERT OR REPLACE INTO blocks (cid, data) VALUES (?, ?)`,
    recordCidStr,
    recordBytes,
  );

  // Store record index
  this.sql.exec(
    `INSERT OR REPLACE INTO records (uri, cid, collection, rkey, value) VALUES (?, ?, ?, ?, ?)`,
    uri,
    recordCidStr,
    collection,
    rkey,
    recordBytes,
  );

  // Associate blobs with this record (delete old associations first for updates)
  this.sql.exec('DELETE FROM record_blobs WHERE record_uri = ?', uri);

  const blobRefs = findBlobRefs(record);
  for (const blobCid of blobRefs) {
    // Verify blob exists — referencing an un-uploaded blob is an error.
    const blobExists = this.sql
      .exec('SELECT cid FROM blobs WHERE cid = ?', blobCid)
      .toArray();

    if (blobExists.length === 0) {
      throw new Error(`BlobNotFound: ${blobCid}`);
    }

    // Create association
    this.sql.exec(
      'INSERT INTO record_blobs (blob_cid, record_uri) VALUES (?, ?)',
      blobCid,
      uri,
    );
  }

  // Rebuild MST
  const mst = new MST(this.sql);
  const dataRoot = await mst.computeRoot();

  // Get previous commit
  const prevCommits = this.sql
    .exec(`SELECT cid, rev FROM commits ORDER BY seq DESC LIMIT 1`)
    .toArray();
  const prevCommit = prevCommits.length > 0 ? prevCommits[0] : null;

  // Create commit
  const rev = createTid();
  // Build commit with CIDs wrapped in CID class (for dag-cbor tag 42 encoding)
  const commit = {
    did,
    version: 3,
    data: new CID(cidToBytes(/** @type {string} */ (dataRoot))), // CID wrapped for explicit encoding
    rev,
    prev: prevCommit?.cid
      ? new CID(cidToBytes(/** @type {string} */ (prevCommit.cid)))
      : null,
  };

  // Sign commit (using dag-cbor encoder for CIDs)
  const commitBytes = cborEncodeDagCbor(commit);
  const signingKey = await this.getSigningKey();
  if (!signingKey) throw new Error('No signing key');
  const sig = await sign(signingKey, commitBytes);

  const signedCommit = { ...commit, sig };
  const signedBytes = cborEncodeDagCbor(signedCommit);
  const commitCid = await createCid(signedBytes);
  const commitCidStr = cidToString(commitCid);

  // Store commit block
  this.sql.exec(
    `INSERT OR REPLACE INTO blocks (cid, data) VALUES (?, ?)`,
    commitCidStr,
    signedBytes,
  );

  // Store commit reference
  this.sql.exec(
    `INSERT INTO commits (cid, rev, prev) VALUES (?, ?, ?)`,
    commitCidStr,
    rev,
    prevCommit?.cid || null,
  );

  // Update head and rev for listRepos
  await this.state.storage.put('head', commitCidStr);
  await this.state.storage.put('rev', rev);

  // Collect blocks for the event (record + commit + MST nodes)
  // Build a mini CAR with just the new blocks - use string CIDs
  const newBlocks = [];
  // Add record block
  newBlocks.push({ cid: recordCidStr, data: recordBytes });
  // Add commit block
  newBlocks.push({ cid: commitCidStr, data: signedBytes });
  // Add MST node blocks (get all blocks referenced by commit.data)
  const mstBlocks = this.collectMstBlocks(/** @type {string} */ (dataRoot));
  newBlocks.push(...mstBlocks);

  // Sequence event with blocks - store complete event data including rev and time
  // blocks must be a full CAR file with header (roots = [commitCid])
  const eventTime = new Date().toISOString();
  const evt = cborEncode({
    ops: [
      { action: 'create', path: `${collection}/${rkey}`, cid: recordCidStr },
    ],
    blocks: buildCarFile(commitCidStr, newBlocks), // Full CAR with header
    rev, // Store the actual commit revision
    time: eventTime, // Store the actual event time
  });
  this.sql.exec(
    `INSERT INTO seq_events (did, commit_cid, evt) VALUES (?, ?, ?)`,
    did,
    commitCidStr,
    evt,
  );

  // Broadcast to subscribers (both local and via default DO for relay)
  const evtRows = /** @type {SeqEventRow[]} */ (
    this.sql
      .exec(`SELECT * FROM seq_events ORDER BY seq DESC LIMIT 1`)
      .toArray()
  );
  if (evtRows.length > 0) {
    this.broadcastEvent(evtRows[0]);
    // Also forward to default DO for relay subscribers
    if (this.env?.PDS) {
      const defaultId = this.env.PDS.idFromName('default');
      const defaultPds = this.env.PDS.get(defaultId);
      // Convert ArrayBuffer to array for JSON serialization
      const row = evtRows[0];
      const evtArray = Array.from(new Uint8Array(row.evt));
      // Fire-and-forget: relay fan-out is best-effort, failures are swallowed.
      defaultPds
        .fetch(
          new Request('http://internal/forward-event', {
            method: 'POST',
            body: JSON.stringify({ ...row, evt: evtArray }),
          }),
        )
        .catch(() => {}); // Ignore forward errors
    }
  }

  return { uri, cid: recordCidStr, commit: commitCidStr };
}
1104
2254
2255
+
/**
2256
+
* @param {string} collection
2257
+
* @param {string} rkey
2258
+
*/
2259
+
/**
 * Delete a record: remove it from the index, garbage-collect any blobs it
 * alone referenced, rebuild the MST, sign/store a new commit, and sequence
 * a delete firehose event.
 * @param {string} collection
 * @param {string} rkey
 * @returns {Promise<{ok: true} | {error: string, message: string}>}
 * @throws {Error} 'PDS not initialized' / 'No signing key'
 */
async deleteRecord(collection, rkey) {
  const did = await this.getDid();
  if (!did) throw new Error('PDS not initialized');

  const uri = `at://${did}/${collection}/${rkey}`;

  // Check if record exists
  const existing = this.sql
    .exec(`SELECT cid FROM records WHERE uri = ?`, uri)
    .toArray();
  if (existing.length === 0) {
    return { error: 'RecordNotFound', message: 'record not found' };
  }

  // Delete from records table
  this.sql.exec(`DELETE FROM records WHERE uri = ?`, uri);

  // Get blobs associated with this record
  const associatedBlobs = this.sql
    .exec('SELECT blob_cid FROM record_blobs WHERE record_uri = ?', uri)
    .toArray();

  // Remove associations for this record
  this.sql.exec('DELETE FROM record_blobs WHERE record_uri = ?', uri);

  // Check each blob for orphan status and delete if unreferenced
  for (const { blob_cid } of associatedBlobs) {
    const stillReferenced = this.sql
      .exec('SELECT 1 FROM record_blobs WHERE blob_cid = ? LIMIT 1', blob_cid)
      .toArray();

    if (stillReferenced.length === 0) {
      // Blob is orphaned, delete from R2 and database
      await this.env?.BLOBS?.delete(`${did}/${blob_cid}`);
      this.sql.exec('DELETE FROM blobs WHERE cid = ?', blob_cid);
    }
  }

  // Rebuild MST
  const mst = new MST(this.sql);
  const dataRoot = await mst.computeRoot();

  // Get previous commit
  const prevCommits = this.sql
    .exec(`SELECT cid, rev FROM commits ORDER BY seq DESC LIMIT 1`)
    .toArray();
  const prevCommit = prevCommits.length > 0 ? prevCommits[0] : null;

  // Create commit — dataRoot may be null when the repo is now empty.
  const rev = createTid();
  const commit = {
    did,
    version: 3,
    data: dataRoot
      ? new CID(cidToBytes(/** @type {string} */ (dataRoot)))
      : null,
    rev,
    prev: prevCommit?.cid
      ? new CID(cidToBytes(/** @type {string} */ (prevCommit.cid)))
      : null,
  };

  // Sign commit
  const commitBytes = cborEncodeDagCbor(commit);
  const signingKey = await this.getSigningKey();
  if (!signingKey) throw new Error('No signing key');
  const sig = await sign(signingKey, commitBytes);

  const signedCommit = { ...commit, sig };
  const signedBytes = cborEncodeDagCbor(signedCommit);
  const commitCid = await createCid(signedBytes);
  const commitCidStr = cidToString(commitCid);

  // Store commit block
  this.sql.exec(
    `INSERT OR REPLACE INTO blocks (cid, data) VALUES (?, ?)`,
    commitCidStr,
    signedBytes,
  );

  // Store commit reference
  this.sql.exec(
    `INSERT INTO commits (cid, rev, prev) VALUES (?, ?, ?)`,
    commitCidStr,
    rev,
    prevCommit?.cid || null,
  );

  // Update head and rev
  await this.state.storage.put('head', commitCidStr);
  await this.state.storage.put('rev', rev);

  // Collect blocks for the event (commit + MST nodes, no record block)
  const newBlocks = [];
  newBlocks.push({ cid: commitCidStr, data: signedBytes });
  if (dataRoot) {
    const mstBlocks = this.collectMstBlocks(/** @type {string} */ (dataRoot));
    newBlocks.push(...mstBlocks);
  }

  // Sequence event with delete action
  const eventTime = new Date().toISOString();
  const evt = cborEncode({
    ops: [{ action: 'delete', path: `${collection}/${rkey}`, cid: null }],
    blocks: buildCarFile(commitCidStr, newBlocks),
    rev,
    time: eventTime,
  });
  this.sql.exec(
    `INSERT INTO seq_events (did, commit_cid, evt) VALUES (?, ?, ?)`,
    did,
    commitCidStr,
    evt,
  );

  // Broadcast to subscribers
  const evtRows = /** @type {SeqEventRow[]} */ (
    this.sql
      .exec(`SELECT * FROM seq_events ORDER BY seq DESC LIMIT 1`)
      .toArray()
  );
  if (evtRows.length > 0) {
    this.broadcastEvent(evtRows[0]);
    // Forward to default DO for relay subscribers
    if (this.env?.PDS) {
      const defaultId = this.env.PDS.idFromName('default');
      const defaultPds = this.env.PDS.get(defaultId);
      const row = evtRows[0];
      const evtArray = Array.from(new Uint8Array(row.evt));
      defaultPds
        .fetch(
          new Request('http://internal/forward-event', {
            method: 'POST',
            body: JSON.stringify({ ...row, evt: evtArray }),
          }),
        )
        .catch(() => {}); // Ignore forward errors
    }
  }

  return { ok: true };
}
2401
+
2402
+
/**
2403
+
* @param {SeqEventRow} evt
2404
+
* @returns {Uint8Array}
2405
+
*/
1105
2406
/**
 * Encode a stored seq_events row as an AT Protocol firehose frame:
 * a CBOR header ({op: 1, t: '#commit'}) immediately followed by a
 * DAG-CBOR body containing the commit metadata and CAR-encoded blocks.
 * @param {SeqEventRow} evt
 * @returns {Uint8Array} the complete wire frame (header + body)
 */
formatEvent(evt) {
  // AT Protocol frame format: header + body
  // Use DAG-CBOR encoding for body (CIDs need tag 42 + 0x00 prefix)
  const header = cborEncode({ op: 1, t: '#commit' });

  // Decode stored event to get ops, blocks, rev, and time
  const evtData = cborDecode(new Uint8Array(evt.evt));
  /** @type {Array<{action: string, path: string, cid: CID|null}>} */
  const ops = evtData.ops.map(
    (/** @type {{action: string, path: string, cid?: string}} */ op) => ({
      ...op,
      cid: op.cid ? new CID(cidToBytes(op.cid)) : null, // Wrap in CID class for tag 42 encoding
    }),
  );
  // Get blocks from stored event (already in CAR format)
  const blocks = evtData.blocks || new Uint8Array(0);

  const body = cborEncodeDagCbor({
    seq: evt.seq,
    rebase: false,
    tooBig: false,
    repo: evt.did,
    commit: new CID(cidToBytes(evt.commit_cid)), // Wrap in CID class for tag 42 encoding
    rev: evtData.rev, // Use stored rev from commit creation
    since: null,
    blocks: blocks instanceof Uint8Array ? blocks : new Uint8Array(blocks),
    ops,
    blobs: [],
    time: evtData.time, // Use stored time from event creation
  });

  // Concatenate header + body into a single contiguous frame
  const frame = new Uint8Array(header.length + body.length);
  frame.set(header);
  frame.set(body, header.length);
  return frame;
}
1139
2443
2444
+
/**
2445
+
* @param {WebSocket} ws
2446
+
* @param {string | ArrayBuffer} message
2447
+
*/
1140
2448
async webSocketMessage(ws, message) {
1141
2449
// Handle ping
1142
-
if (message === 'ping') ws.send('pong')
2450
+
if (message === 'ping') ws.send('pong');
1143
2451
}
1144
2452
1145
-
async webSocketClose(ws, code, reason) {
2453
+
/**
2454
+
* @param {WebSocket} _ws
2455
+
* @param {number} _code
2456
+
* @param {string} _reason
2457
+
*/
2458
+
async webSocketClose(_ws, _code, _reason) {
1146
2459
// Durable Object will hibernate when no connections remain
1147
2460
}
1148
2461
2462
+
/**
2463
+
* @param {SeqEventRow} evt
2464
+
*/
1149
2465
broadcastEvent(evt) {
1150
-
const frame = this.formatEvent(evt)
2466
+
const frame = this.formatEvent(evt);
1151
2467
for (const ws of this.state.getWebSockets()) {
1152
2468
try {
1153
-
ws.send(frame)
1154
-
} catch (e) {
2469
+
ws.send(frame);
2470
+
} catch (_e) {
1155
2471
// Client disconnected
1156
2472
}
1157
2473
}
1158
2474
}
1159
2475
1160
2476
async handleAtprotoDid() {
1161
-
let did = await this.getDid()
2477
+
let did = await this.getDid();
1162
2478
if (!did) {
1163
-
const registeredDids = await this.state.storage.get('registeredDids') || []
1164
-
did = registeredDids[0]
2479
+
/** @type {string[]} */
2480
+
const registeredDids =
2481
+
(await this.state.storage.get('registeredDids')) || [];
2482
+
did = registeredDids[0];
1165
2483
}
1166
2484
if (!did) {
1167
-
return new Response('User not found', { status: 404 })
2485
+
return new Response('User not found', { status: 404 });
1168
2486
}
1169
-
return new Response(did, { headers: { 'Content-Type': 'text/plain' } })
2487
+
return new Response(/** @type {string} */ (did), {
2488
+
headers: { 'Content-Type': 'text/plain' },
2489
+
});
1170
2490
}
1171
2491
2492
+
/** @param {Request} request */
1172
2493
async handleInit(request) {
1173
-
const body = await request.json()
2494
+
const body = await request.json();
1174
2495
if (!body.did || !body.privateKey) {
1175
-
return Response.json({ error: 'missing did or privateKey' }, { status: 400 })
2496
+
return errorResponse('InvalidRequest', 'missing did or privateKey', 400);
1176
2497
}
1177
-
await this.initIdentity(body.did, body.privateKey, body.handle || null)
1178
-
return Response.json({ ok: true, did: body.did, handle: body.handle || null })
2498
+
await this.initIdentity(body.did, body.privateKey, body.handle || null);
2499
+
return Response.json({
2500
+
ok: true,
2501
+
did: body.did,
2502
+
handle: body.handle || null,
2503
+
});
1179
2504
}
1180
2505
1181
2506
async handleStatus() {
1182
-
const did = await this.getDid()
1183
-
return Response.json({ initialized: !!did, did: did || null })
1184
-
}
1185
-
1186
-
async handleResetRepo() {
1187
-
this.sql.exec(`DELETE FROM blocks`)
1188
-
this.sql.exec(`DELETE FROM records`)
1189
-
this.sql.exec(`DELETE FROM commits`)
1190
-
this.sql.exec(`DELETE FROM seq_events`)
1191
-
await this.state.storage.delete('head')
1192
-
await this.state.storage.delete('rev')
1193
-
return Response.json({ ok: true, message: 'repo data cleared' })
2507
+
const did = await this.getDid();
2508
+
return Response.json({ initialized: !!did, did: did || null });
1194
2509
}
1195
2510
2511
+
/** @param {Request} request */
1196
2512
async handleForwardEvent(request) {
1197
-
const evt = await request.json()
1198
-
const numSockets = [...this.state.getWebSockets()].length
1199
-
console.log(`forward-event: received event seq=${evt.seq}, ${numSockets} connected sockets`)
2513
+
const evt = await request.json();
2514
+
const numSockets = [...this.state.getWebSockets()].length;
1200
2515
this.broadcastEvent({
1201
2516
seq: evt.seq,
1202
2517
did: evt.did,
1203
2518
commit_cid: evt.commit_cid,
1204
-
evt: new Uint8Array(Object.values(evt.evt))
1205
-
})
1206
-
return Response.json({ ok: true, sockets: numSockets })
2519
+
evt: new Uint8Array(Object.values(evt.evt)),
2520
+
});
2521
+
return Response.json({ ok: true, sockets: numSockets });
1207
2522
}
1208
2523
2524
+
/** @param {Request} request */
1209
2525
async handleRegisterDid(request) {
1210
-
const body = await request.json()
1211
-
const registeredDids = await this.state.storage.get('registeredDids') || []
2526
+
const body = await request.json();
2527
+
/** @type {string[]} */
2528
+
const registeredDids =
2529
+
(await this.state.storage.get('registeredDids')) || [];
1212
2530
if (!registeredDids.includes(body.did)) {
1213
-
registeredDids.push(body.did)
1214
-
await this.state.storage.put('registeredDids', registeredDids)
2531
+
registeredDids.push(body.did);
2532
+
await this.state.storage.put('registeredDids', registeredDids);
1215
2533
}
1216
-
return Response.json({ ok: true })
2534
+
return Response.json({ ok: true });
1217
2535
}
1218
2536
1219
2537
async handleGetRegisteredDids() {
1220
-
const registeredDids = await this.state.storage.get('registeredDids') || []
1221
-
return Response.json({ dids: registeredDids })
2538
+
const registeredDids =
2539
+
(await this.state.storage.get('registeredDids')) || [];
2540
+
return Response.json({ dids: registeredDids });
2541
+
}
2542
+
2543
+
/** @param {Request} request */
2544
+
async handleRegisterHandle(request) {
2545
+
const body = await request.json();
2546
+
const { handle, did } = body;
2547
+
if (!handle || !did) {
2548
+
return errorResponse('InvalidRequest', 'missing handle or did', 400);
2549
+
}
2550
+
/** @type {Record<string, string>} */
2551
+
const handleMap = (await this.state.storage.get('handleMap')) || {};
2552
+
handleMap[handle] = did;
2553
+
await this.state.storage.put('handleMap', handleMap);
2554
+
return Response.json({ ok: true });
2555
+
}
2556
+
2557
+
/** @param {URL} url */
2558
+
async handleResolveHandle(url) {
2559
+
const handle = url.searchParams.get('handle');
2560
+
if (!handle) {
2561
+
return errorResponse('InvalidRequest', 'missing handle', 400);
2562
+
}
2563
+
/** @type {Record<string, string>} */
2564
+
const handleMap = (await this.state.storage.get('handleMap')) || {};
2565
+
const did = handleMap[handle];
2566
+
if (!did) {
2567
+
return errorResponse('NotFound', 'handle not found', 404);
2568
+
}
2569
+
return Response.json({ did });
1222
2570
}
1223
2571
1224
2572
async handleRepoInfo() {
1225
-
const head = await this.state.storage.get('head')
1226
-
const rev = await this.state.storage.get('rev')
1227
-
return Response.json({ head: head || null, rev: rev || null })
2573
+
const head = await this.state.storage.get('head');
2574
+
const rev = await this.state.storage.get('rev');
2575
+
return Response.json({ head: head || null, rev: rev || null });
1228
2576
}
1229
2577
2578
+
/** @param {Request} request */
1230
2579
handleDescribeServer(request) {
1231
-
const hostname = request.headers.get('x-hostname') || 'localhost'
2580
+
const hostname = request.headers.get('x-hostname') || 'localhost';
1232
2581
return Response.json({
1233
2582
did: `did:web:${hostname}`,
1234
2583
availableUserDomains: [`.${hostname}`],
1235
2584
inviteCodeRequired: false,
1236
2585
phoneVerificationRequired: false,
1237
2586
links: {},
1238
-
contact: {}
1239
-
})
2587
+
contact: {},
2588
+
});
2589
+
}
2590
+
2591
+
/** @param {Request} request */
2592
+
async handleCreateSession(request) {
2593
+
const body = await request.json();
2594
+
const { identifier, password } = body;
2595
+
2596
+
if (!identifier || !password) {
2597
+
return errorResponse(
2598
+
'InvalidRequest',
2599
+
'Missing identifier or password',
2600
+
400,
2601
+
);
2602
+
}
2603
+
2604
+
// Check password against env var (timing-safe comparison)
2605
+
const expectedPassword = this.env?.PDS_PASSWORD;
2606
+
if (
2607
+
!expectedPassword ||
2608
+
!(await timingSafeEqual(password, expectedPassword))
2609
+
) {
2610
+
return errorResponse(
2611
+
'AuthRequired',
2612
+
'Invalid identifier or password',
2613
+
401,
2614
+
);
2615
+
}
2616
+
2617
+
// Resolve identifier to DID
2618
+
let did = identifier;
2619
+
if (!identifier.startsWith('did:')) {
2620
+
// Try to resolve handle
2621
+
/** @type {Record<string, string>} */
2622
+
const handleMap = (await this.state.storage.get('handleMap')) || {};
2623
+
did = handleMap[identifier];
2624
+
if (!did) {
2625
+
return errorResponse('InvalidRequest', 'Unable to resolve handle', 400);
2626
+
}
2627
+
}
2628
+
2629
+
// Get handle for response
2630
+
const handle = await this.getHandleForDid(did);
2631
+
2632
+
// Create tokens
2633
+
const jwtSecret = this.env?.JWT_SECRET;
2634
+
if (!jwtSecret) {
2635
+
return errorResponse(
2636
+
'InternalServerError',
2637
+
'Server not configured for authentication',
2638
+
500,
2639
+
);
2640
+
}
2641
+
2642
+
const accessJwt = await createAccessJwt(did, jwtSecret);
2643
+
const refreshJwt = await createRefreshJwt(did, jwtSecret);
2644
+
2645
+
return Response.json({
2646
+
accessJwt,
2647
+
refreshJwt,
2648
+
handle: handle || did,
2649
+
did,
2650
+
active: true,
2651
+
});
2652
+
}
2653
+
2654
+
/** @param {Request} request */
2655
+
async handleGetSession(request) {
2656
+
const authHeader = request.headers.get('Authorization');
2657
+
if (!authHeader) {
2658
+
return errorResponse(
2659
+
'AuthRequired',
2660
+
'Missing or invalid authorization header',
2661
+
401,
2662
+
);
2663
+
}
2664
+
2665
+
let did;
2666
+
2667
+
// OAuth DPoP token
2668
+
if (authHeader.startsWith('DPoP ')) {
2669
+
try {
2670
+
const result = await verifyOAuthAccessToken(
2671
+
request,
2672
+
authHeader.slice(5),
2673
+
this,
2674
+
);
2675
+
did = result.did;
2676
+
} catch (err) {
2677
+
const message = err instanceof Error ? err.message : String(err);
2678
+
return errorResponse('InvalidToken', message, 401);
2679
+
}
2680
+
}
2681
+
// Legacy Bearer token
2682
+
else if (authHeader.startsWith('Bearer ')) {
2683
+
const token = authHeader.slice(7);
2684
+
const jwtSecret = this.env?.JWT_SECRET;
2685
+
if (!jwtSecret) {
2686
+
return errorResponse(
2687
+
'InternalServerError',
2688
+
'Server not configured for authentication',
2689
+
500,
2690
+
);
2691
+
}
2692
+
2693
+
try {
2694
+
const payload = await verifyAccessJwt(token, jwtSecret);
2695
+
did = payload.sub;
2696
+
} catch (err) {
2697
+
const message = err instanceof Error ? err.message : String(err);
2698
+
return errorResponse('InvalidToken', message, 401);
2699
+
}
2700
+
} else {
2701
+
return errorResponse(
2702
+
'AuthRequired',
2703
+
'Invalid authorization header format',
2704
+
401,
2705
+
);
2706
+
}
2707
+
2708
+
const handle = await this.getHandleForDid(did);
2709
+
return Response.json({
2710
+
handle: handle || did,
2711
+
did,
2712
+
active: true,
2713
+
});
2714
+
}
2715
+
2716
+
/** @param {Request} request */
2717
+
async handleRefreshSession(request) {
2718
+
const authHeader = request.headers.get('Authorization');
2719
+
if (!authHeader || !authHeader.startsWith('Bearer ')) {
2720
+
return errorResponse(
2721
+
'AuthRequired',
2722
+
'Missing or invalid authorization header',
2723
+
401,
2724
+
);
2725
+
}
2726
+
2727
+
const token = authHeader.slice(7); // Remove 'Bearer '
2728
+
const jwtSecret = this.env?.JWT_SECRET;
2729
+
if (!jwtSecret) {
2730
+
return errorResponse(
2731
+
'InternalServerError',
2732
+
'Server not configured for authentication',
2733
+
500,
2734
+
);
2735
+
}
2736
+
2737
+
try {
2738
+
const payload = await verifyRefreshJwt(token, jwtSecret);
2739
+
const did = payload.sub;
2740
+
const handle = await this.getHandleForDid(did);
2741
+
2742
+
// Issue fresh tokens
2743
+
const accessJwt = await createAccessJwt(did, jwtSecret);
2744
+
const refreshJwt = await createRefreshJwt(did, jwtSecret);
2745
+
2746
+
return Response.json({
2747
+
accessJwt,
2748
+
refreshJwt,
2749
+
handle: handle || did,
2750
+
did,
2751
+
active: true,
2752
+
});
2753
+
} catch (err) {
2754
+
const message = err instanceof Error ? err.message : String(err);
2755
+
if (message === 'Token expired') {
2756
+
return errorResponse('ExpiredToken', 'Refresh token has expired', 400);
2757
+
}
2758
+
return errorResponse('InvalidToken', message, 400);
2759
+
}
2760
+
}
2761
+
2762
+
/** @param {Request} _request */
2763
+
async handleGetPreferences(_request) {
2764
+
// Preferences are stored per-user in their DO
2765
+
const preferences = (await this.state.storage.get('preferences')) || [];
2766
+
return Response.json({ preferences });
2767
+
}
2768
+
2769
+
/** @param {Request} request */
2770
+
async handlePutPreferences(request) {
2771
+
const body = await request.json();
2772
+
const { preferences } = body;
2773
+
if (!Array.isArray(preferences)) {
2774
+
return errorResponse(
2775
+
'InvalidRequest',
2776
+
'preferences must be an array',
2777
+
400,
2778
+
);
2779
+
}
2780
+
await this.state.storage.put('preferences', preferences);
2781
+
return Response.json({});
2782
+
}
2783
+
2784
+
/**
2785
+
* @param {string} did
2786
+
* @returns {Promise<string|null>}
2787
+
*/
2788
+
async getHandleForDid(did) {
2789
+
// Check if this DID has a handle registered
2790
+
/** @type {Record<string, string>} */
2791
+
const handleMap = (await this.state.storage.get('handleMap')) || {};
2792
+
for (const [handle, mappedDid] of Object.entries(handleMap)) {
2793
+
if (mappedDid === did) return handle;
2794
+
}
2795
+
// Check instance's own handle
2796
+
const instanceDid = await this.getDid();
2797
+
if (instanceDid === did) {
2798
+
return /** @type {string|null} */ (
2799
+
await this.state.storage.get('handle')
2800
+
);
2801
+
}
2802
+
return null;
2803
+
}
2804
+
2805
+
/**
2806
+
* @param {string} did
2807
+
* @param {string|null} lxm
2808
+
*/
2809
+
async createServiceAuthForAppView(did, lxm) {
2810
+
const signingKey = await this.getSigningKey();
2811
+
if (!signingKey) throw new Error('No signing key available');
2812
+
return createServiceJwt({
2813
+
iss: did,
2814
+
aud: 'did:web:api.bsky.app',
2815
+
lxm,
2816
+
signingKey,
2817
+
});
2818
+
}
2819
+
2820
+
/**
2821
+
* @param {Request} request
2822
+
* @param {string} userDid
2823
+
*/
2824
+
async handleAppViewProxy(request, userDid) {
2825
+
const url = new URL(request.url);
2826
+
const lxm = url.pathname.replace('/xrpc/', '');
2827
+
const serviceJwt = await this.createServiceAuthForAppView(userDid, lxm);
2828
+
return proxyToService(request, BSKY_APPVIEW_URL, `Bearer ${serviceJwt}`);
1240
2829
}
1241
2830
1242
2831
async handleListRepos() {
1243
-
const registeredDids = await this.state.storage.get('registeredDids') || []
1244
-
const did = await this.getDid()
1245
-
const repos = did ? [{ did, head: null, rev: null }] :
1246
-
registeredDids.map(d => ({ did: d, head: null, rev: null }))
1247
-
return Response.json({ repos })
2832
+
/** @type {string[]} */
2833
+
const registeredDids =
2834
+
(await this.state.storage.get('registeredDids')) || [];
2835
+
const did = await this.getDid();
2836
+
const repos = did
2837
+
? [{ did, head: null, rev: null }]
2838
+
: registeredDids.map((/** @type {string} */ d) => ({
2839
+
did: d,
2840
+
head: null,
2841
+
rev: null,
2842
+
}));
2843
+
return Response.json({ repos });
1248
2844
}
1249
2845
2846
+
/** @param {Request} request */
1250
2847
async handleCreateRecord(request) {
1251
-
const body = await request.json()
2848
+
const body = await request.json();
1252
2849
if (!body.collection || !body.record) {
1253
-
return Response.json({ error: 'missing collection or record' }, { status: 400 })
2850
+
return errorResponse(
2851
+
'InvalidRequest',
2852
+
'missing collection or record',
2853
+
400,
2854
+
);
1254
2855
}
1255
2856
try {
1256
-
const result = await this.createRecord(body.collection, body.record, body.rkey)
1257
-
return Response.json(result)
2857
+
const result = await this.createRecord(
2858
+
body.collection,
2859
+
body.record,
2860
+
body.rkey,
2861
+
);
2862
+
const head = await this.state.storage.get('head');
2863
+
const rev = await this.state.storage.get('rev');
2864
+
return Response.json({
2865
+
uri: result.uri,
2866
+
cid: result.cid,
2867
+
commit: { cid: head, rev },
2868
+
validationStatus: 'valid',
2869
+
});
1258
2870
} catch (err) {
1259
-
return Response.json({ error: err.message }, { status: 500 })
2871
+
const message = err instanceof Error ? err.message : String(err);
2872
+
return errorResponse('InternalError', message, 500);
1260
2873
}
1261
2874
}
1262
2875
2876
+
/** @param {Request} request */
2877
+
async handleDeleteRecord(request) {
2878
+
const body = await request.json();
2879
+
if (!body.collection || !body.rkey) {
2880
+
return errorResponse('InvalidRequest', 'missing collection or rkey', 400);
2881
+
}
2882
+
try {
2883
+
const result = await this.deleteRecord(body.collection, body.rkey);
2884
+
if (result.error) {
2885
+
return errorResponse(result.error, result.message, 404);
2886
+
}
2887
+
return Response.json({});
2888
+
} catch (err) {
2889
+
const message = err instanceof Error ? err.message : String(err);
2890
+
return errorResponse('InternalError', message, 500);
2891
+
}
2892
+
}
2893
+
2894
+
/** @param {Request} request */
2895
+
async handlePutRecord(request) {
2896
+
const body = await request.json();
2897
+
if (!body.collection || !body.rkey || !body.record) {
2898
+
return errorResponse(
2899
+
'InvalidRequest',
2900
+
'missing collection, rkey, or record',
2901
+
400,
2902
+
);
2903
+
}
2904
+
try {
2905
+
// putRecord is like createRecord but with a specific rkey (upsert)
2906
+
const result = await this.createRecord(
2907
+
body.collection,
2908
+
body.record,
2909
+
body.rkey,
2910
+
);
2911
+
const head = await this.state.storage.get('head');
2912
+
const rev = await this.state.storage.get('rev');
2913
+
return Response.json({
2914
+
uri: result.uri,
2915
+
cid: result.cid,
2916
+
commit: { cid: head, rev },
2917
+
validationStatus: 'valid',
2918
+
});
2919
+
} catch (err) {
2920
+
const message = err instanceof Error ? err.message : String(err);
2921
+
return errorResponse('InternalError', message, 500);
2922
+
}
2923
+
}
2924
+
2925
+
/** @param {Request} request */
2926
+
async handleApplyWrites(request) {
2927
+
const body = await request.json();
2928
+
if (!body.writes || !Array.isArray(body.writes)) {
2929
+
return errorResponse('InvalidRequest', 'missing writes array', 400);
2930
+
}
2931
+
try {
2932
+
const results = [];
2933
+
for (const write of body.writes) {
2934
+
const type = write.$type;
2935
+
if (type === 'com.atproto.repo.applyWrites#create') {
2936
+
const result = await this.createRecord(
2937
+
write.collection,
2938
+
write.value,
2939
+
write.rkey,
2940
+
);
2941
+
results.push({
2942
+
$type: 'com.atproto.repo.applyWrites#createResult',
2943
+
uri: result.uri,
2944
+
cid: result.cid,
2945
+
validationStatus: 'valid',
2946
+
});
2947
+
} else if (type === 'com.atproto.repo.applyWrites#update') {
2948
+
const result = await this.createRecord(
2949
+
write.collection,
2950
+
write.value,
2951
+
write.rkey,
2952
+
);
2953
+
results.push({
2954
+
$type: 'com.atproto.repo.applyWrites#updateResult',
2955
+
uri: result.uri,
2956
+
cid: result.cid,
2957
+
validationStatus: 'valid',
2958
+
});
2959
+
} else if (type === 'com.atproto.repo.applyWrites#delete') {
2960
+
await this.deleteRecord(write.collection, write.rkey);
2961
+
results.push({
2962
+
$type: 'com.atproto.repo.applyWrites#deleteResult',
2963
+
});
2964
+
} else {
2965
+
return errorResponse(
2966
+
'InvalidRequest',
2967
+
`Unknown write operation type: ${type}`,
2968
+
400,
2969
+
);
2970
+
}
2971
+
}
2972
+
// Return commit info
2973
+
const head = await this.state.storage.get('head');
2974
+
const rev = await this.state.storage.get('rev');
2975
+
return Response.json({ commit: { cid: head, rev }, results });
2976
+
} catch (err) {
2977
+
const message = err instanceof Error ? err.message : String(err);
2978
+
return errorResponse('InternalError', message, 500);
2979
+
}
2980
+
}
2981
+
2982
+
/** @param {URL} url */
1263
2983
async handleGetRecord(url) {
1264
-
const collection = url.searchParams.get('collection')
1265
-
const rkey = url.searchParams.get('rkey')
2984
+
const collection = url.searchParams.get('collection');
2985
+
const rkey = url.searchParams.get('rkey');
1266
2986
if (!collection || !rkey) {
1267
-
return Response.json({ error: 'missing collection or rkey' }, { status: 400 })
2987
+
return errorResponse('InvalidRequest', 'missing collection or rkey', 400);
1268
2988
}
1269
-
const did = await this.getDid()
1270
-
const uri = `at://${did}/${collection}/${rkey}`
1271
-
const rows = this.sql.exec(
1272
-
`SELECT cid, value FROM records WHERE uri = ?`, uri
1273
-
).toArray()
2989
+
const did = await this.getDid();
2990
+
const uri = `at://${did}/${collection}/${rkey}`;
2991
+
const rows = /** @type {RecordRow[]} */ (
2992
+
this.sql
2993
+
.exec(`SELECT cid, value FROM records WHERE uri = ?`, uri)
2994
+
.toArray()
2995
+
);
1274
2996
if (rows.length === 0) {
1275
-
return Response.json({ error: 'record not found' }, { status: 404 })
2997
+
return errorResponse('RecordNotFound', 'record not found', 404);
1276
2998
}
1277
-
const row = rows[0]
1278
-
const value = cborDecode(new Uint8Array(row.value))
1279
-
return Response.json({ uri, cid: row.cid, value })
2999
+
const row = rows[0];
3000
+
const value = cborDecode(new Uint8Array(row.value));
3001
+
return Response.json({ uri, cid: row.cid, value });
3002
+
}
3003
+
3004
+
async handleDescribeRepo() {
3005
+
const did = await this.getDid();
3006
+
if (!did) {
3007
+
return errorResponse('RepoNotFound', 'repo not found', 404);
3008
+
}
3009
+
const handle = await this.state.storage.get('handle');
3010
+
// Get unique collections
3011
+
const collections = this.sql
3012
+
.exec(`SELECT DISTINCT collection FROM records`)
3013
+
.toArray()
3014
+
.map((r) => r.collection);
3015
+
3016
+
return Response.json({
3017
+
handle: handle || did,
3018
+
did,
3019
+
didDoc: {},
3020
+
collections,
3021
+
handleIsCorrect: !!handle,
3022
+
});
3023
+
}
3024
+
3025
+
/** @param {URL} url */
3026
+
async handleListRecords(url) {
3027
+
const collection = url.searchParams.get('collection');
3028
+
if (!collection) {
3029
+
return errorResponse('InvalidRequest', 'missing collection', 400);
3030
+
}
3031
+
const limit = Math.min(
3032
+
parseInt(url.searchParams.get('limit') || '50', 10),
3033
+
100,
3034
+
);
3035
+
const reverse = url.searchParams.get('reverse') === 'true';
3036
+
const _cursor = url.searchParams.get('cursor');
3037
+
3038
+
const _did = await this.getDid();
3039
+
const query = `SELECT uri, cid, value FROM records WHERE collection = ? ORDER BY rkey ${reverse ? 'DESC' : 'ASC'} LIMIT ?`;
3040
+
const params = [collection, limit + 1];
3041
+
3042
+
const rows = /** @type {RecordRow[]} */ (
3043
+
this.sql.exec(query, ...params).toArray()
3044
+
);
3045
+
const hasMore = rows.length > limit;
3046
+
const records = rows.slice(0, limit).map((r) => ({
3047
+
uri: r.uri,
3048
+
cid: r.cid,
3049
+
value: cborDecode(new Uint8Array(r.value)),
3050
+
}));
3051
+
3052
+
return Response.json({
3053
+
records,
3054
+
cursor: hasMore ? records[records.length - 1]?.uri : undefined,
3055
+
});
1280
3056
}
1281
3057
1282
3058
handleGetLatestCommit() {
1283
-
const commits = this.sql.exec(
1284
-
`SELECT cid, rev FROM commits ORDER BY seq DESC LIMIT 1`
1285
-
).toArray()
3059
+
const commits = /** @type {CommitRow[]} */ (
3060
+
this.sql
3061
+
.exec(`SELECT cid, rev FROM commits ORDER BY seq DESC LIMIT 1`)
3062
+
.toArray()
3063
+
);
1286
3064
if (commits.length === 0) {
1287
-
return Response.json({ error: 'RepoNotFound', message: 'repo not found' }, { status: 404 })
3065
+
return errorResponse('RepoNotFound', 'repo not found', 404);
1288
3066
}
1289
-
return Response.json({ cid: commits[0].cid, rev: commits[0].rev })
3067
+
return Response.json({ cid: commits[0].cid, rev: commits[0].rev });
1290
3068
}
1291
3069
1292
3070
async handleGetRepoStatus() {
1293
-
const did = await this.getDid()
1294
-
const commits = this.sql.exec(
1295
-
`SELECT cid, rev FROM commits ORDER BY seq DESC LIMIT 1`
1296
-
).toArray()
3071
+
const did = await this.getDid();
3072
+
const commits = /** @type {CommitRow[]} */ (
3073
+
this.sql
3074
+
.exec(`SELECT cid, rev FROM commits ORDER BY seq DESC LIMIT 1`)
3075
+
.toArray()
3076
+
);
1297
3077
if (commits.length === 0 || !did) {
1298
-
return Response.json({ error: 'RepoNotFound', message: 'repo not found' }, { status: 404 })
3078
+
return errorResponse('RepoNotFound', 'repo not found', 404);
1299
3079
}
1300
-
return Response.json({ did, active: true, status: 'active', rev: commits[0].rev })
3080
+
return Response.json({
3081
+
did,
3082
+
active: true,
3083
+
status: 'active',
3084
+
rev: commits[0].rev,
3085
+
});
1301
3086
}
1302
3087
1303
3088
handleGetRepo() {
1304
-
const commits = this.sql.exec(
1305
-
`SELECT cid FROM commits ORDER BY seq DESC LIMIT 1`
1306
-
).toArray()
3089
+
const commits = /** @type {CommitRow[]} */ (
3090
+
this.sql
3091
+
.exec(`SELECT cid FROM commits ORDER BY seq DESC LIMIT 1`)
3092
+
.toArray()
3093
+
);
3094
+
if (commits.length === 0) {
3095
+
return errorResponse('RepoNotFound', 'repo not found', 404);
3096
+
}
3097
+
3098
+
// Only include blocks reachable from the current commit
3099
+
const commitCid = commits[0].cid;
3100
+
const neededCids = new Set();
3101
+
3102
+
// Helper to get block data
3103
+
/** @param {string} cid */
3104
+
const getBlock = (cid) => {
3105
+
const rows = /** @type {BlockRow[]} */ (
3106
+
this.sql.exec(`SELECT data FROM blocks WHERE cid = ?`, cid).toArray()
3107
+
);
3108
+
return rows.length > 0 ? new Uint8Array(rows[0].data) : null;
3109
+
};
3110
+
3111
+
// Collect all reachable blocks starting from commit
3112
+
/** @param {string} cid */
3113
+
const collectBlocks = (cid) => {
3114
+
if (neededCids.has(cid)) return;
3115
+
neededCids.add(cid);
3116
+
3117
+
const data = getBlock(cid);
3118
+
if (!data) return;
3119
+
3120
+
// Decode CBOR to find CID references
3121
+
try {
3122
+
const decoded = cborDecode(data);
3123
+
if (decoded && typeof decoded === 'object') {
3124
+
// Commit object - follow 'data' (MST root)
3125
+
if (decoded.data instanceof Uint8Array) {
3126
+
collectBlocks(cidToString(decoded.data));
3127
+
}
3128
+
// MST node - follow 'l' and entries' 'v' and 't'
3129
+
if (decoded.l instanceof Uint8Array) {
3130
+
collectBlocks(cidToString(decoded.l));
3131
+
}
3132
+
if (Array.isArray(decoded.e)) {
3133
+
for (const entry of decoded.e) {
3134
+
if (entry.v instanceof Uint8Array) {
3135
+
collectBlocks(cidToString(entry.v));
3136
+
}
3137
+
if (entry.t instanceof Uint8Array) {
3138
+
collectBlocks(cidToString(entry.t));
3139
+
}
3140
+
}
3141
+
}
3142
+
}
3143
+
} catch (_e) {
3144
+
// Not a structured block, that's fine
3145
+
}
3146
+
};
3147
+
3148
+
collectBlocks(commitCid);
3149
+
3150
+
// Build CAR with only needed blocks
3151
+
const blocksForCar = [];
3152
+
for (const cid of neededCids) {
3153
+
const data = getBlock(cid);
3154
+
if (data) {
3155
+
blocksForCar.push({ cid, data });
3156
+
}
3157
+
}
3158
+
3159
+
const car = buildCarFile(commitCid, blocksForCar);
3160
+
return new Response(/** @type {BodyInit} */ (car), {
3161
+
headers: { 'content-type': 'application/vnd.ipld.car' },
3162
+
});
3163
+
}
3164
+
3165
+
/** @param {URL} url */
3166
+
async handleSyncGetRecord(url) {
3167
+
const collection = url.searchParams.get('collection');
3168
+
const rkey = url.searchParams.get('rkey');
3169
+
if (!collection || !rkey) {
3170
+
return errorResponse('InvalidRequest', 'missing collection or rkey', 400);
3171
+
}
3172
+
const did = await this.getDid();
3173
+
const uri = `at://${did}/${collection}/${rkey}`;
3174
+
const rows = /** @type {RecordRow[]} */ (
3175
+
this.sql.exec(`SELECT cid FROM records WHERE uri = ?`, uri).toArray()
3176
+
);
3177
+
if (rows.length === 0) {
3178
+
return errorResponse('RecordNotFound', 'record not found', 404);
3179
+
}
3180
+
const recordCid = rows[0].cid;
3181
+
3182
+
// Get latest commit
3183
+
const commits = /** @type {CommitRow[]} */ (
3184
+
this.sql
3185
+
.exec(`SELECT cid FROM commits ORDER BY seq DESC LIMIT 1`)
3186
+
.toArray()
3187
+
);
1307
3188
if (commits.length === 0) {
1308
-
return Response.json({ error: 'repo not found' }, { status: 404 })
3189
+
return errorResponse('RepoNotFound', 'no commits', 404);
3190
+
}
3191
+
const commitCid = commits[0].cid;
3192
+
3193
+
// Build proof chain: commit -> MST path -> record
3194
+
// Include commit block, all MST nodes on path to record, and record block
3195
+
/** @type {Array<{cid: string, data: Uint8Array}>} */
3196
+
const blocks = [];
3197
+
const included = new Set();
3198
+
3199
+
/** @param {string} cidStr */
3200
+
const addBlock = (cidStr) => {
3201
+
if (included.has(cidStr)) return;
3202
+
included.add(cidStr);
3203
+
const blockRows = /** @type {BlockRow[]} */ (
3204
+
this.sql.exec(`SELECT data FROM blocks WHERE cid = ?`, cidStr).toArray()
3205
+
);
3206
+
if (blockRows.length > 0) {
3207
+
blocks.push({ cid: cidStr, data: new Uint8Array(blockRows[0].data) });
3208
+
}
3209
+
};
3210
+
3211
+
// Add commit block
3212
+
addBlock(commitCid);
3213
+
3214
+
// Get commit to find data root
3215
+
const commitRows = /** @type {BlockRow[]} */ (
3216
+
this.sql
3217
+
.exec(`SELECT data FROM blocks WHERE cid = ?`, commitCid)
3218
+
.toArray()
3219
+
);
3220
+
if (commitRows.length > 0) {
3221
+
const commit = cborDecode(new Uint8Array(commitRows[0].data));
3222
+
if (commit.data) {
3223
+
const dataRootCid = cidToString(commit.data);
3224
+
// Collect MST path blocks (this includes all MST nodes)
3225
+
const mstBlocks = this.collectMstBlocks(dataRootCid);
3226
+
for (const block of mstBlocks) {
3227
+
addBlock(block.cid);
3228
+
}
3229
+
}
3230
+
}
3231
+
3232
+
// Add record block
3233
+
addBlock(recordCid);
3234
+
3235
+
const car = buildCarFile(commitCid, blocks);
3236
+
return new Response(/** @type {BodyInit} */ (car), {
3237
+
headers: { 'content-type': 'application/vnd.ipld.car' },
3238
+
});
3239
+
}
3240
+
3241
+
/** @param {Request} request */
3242
+
async handleUploadBlob(request) {
3243
+
// Check if auth was already done by outer handler (OAuth/DPoP flow)
3244
+
const authedDid = request.headers.get('x-authed-did');
3245
+
if (!authedDid) {
3246
+
// Fallback to legacy Bearer token auth
3247
+
const authHeader = request.headers.get('Authorization');
3248
+
if (!authHeader || !authHeader.startsWith('Bearer ')) {
3249
+
return errorResponse(
3250
+
'AuthRequired',
3251
+
'Missing or invalid authorization header',
3252
+
401,
3253
+
);
3254
+
}
3255
+
3256
+
const token = authHeader.slice(7);
3257
+
const jwtSecret = this.env?.JWT_SECRET;
3258
+
if (!jwtSecret) {
3259
+
return errorResponse(
3260
+
'InternalServerError',
3261
+
'Server not configured for authentication',
3262
+
500,
3263
+
);
3264
+
}
3265
+
3266
+
try {
3267
+
await verifyAccessJwt(token, jwtSecret);
3268
+
} catch (err) {
3269
+
const message = err instanceof Error ? err.message : String(err);
3270
+
return errorResponse('InvalidToken', message, 401);
3271
+
}
3272
+
}
3273
+
3274
+
const did = await this.getDid();
3275
+
if (!did) {
3276
+
return errorResponse('InvalidRequest', 'PDS not initialized', 400);
3277
+
}
3278
+
3279
+
// Read body as ArrayBuffer
3280
+
const bodyBytes = await request.arrayBuffer();
3281
+
const size = bodyBytes.byteLength;
3282
+
3283
+
// Check size limits
3284
+
if (size === 0) {
3285
+
return errorResponse(
3286
+
'InvalidRequest',
3287
+
'Empty blobs are not allowed',
3288
+
400,
3289
+
);
3290
+
}
3291
+
const MAX_BLOB_SIZE = 50 * 1024 * 1024;
3292
+
if (size > MAX_BLOB_SIZE) {
3293
+
return errorResponse(
3294
+
'BlobTooLarge',
3295
+
`Blob size ${size} exceeds maximum ${MAX_BLOB_SIZE}`,
3296
+
400,
3297
+
);
3298
+
}
3299
+
3300
+
// Sniff MIME type, fall back to Content-Type header
3301
+
const contentType =
3302
+
request.headers.get('Content-Type') || 'application/octet-stream';
3303
+
const sniffed = sniffMimeType(bodyBytes);
3304
+
const mimeType = sniffed || contentType;
3305
+
3306
+
// Compute CID using raw codec for blobs
3307
+
const cid = await createBlobCid(new Uint8Array(bodyBytes));
3308
+
const cidStr = cidToString(cid);
3309
+
3310
+
// Upload to R2 (idempotent - same CID always has same content)
3311
+
const r2Key = `${did}/${cidStr}`;
3312
+
await this.env?.BLOBS?.put(r2Key, bodyBytes, {
3313
+
httpMetadata: { contentType: mimeType },
3314
+
});
3315
+
3316
+
// Insert metadata (INSERT OR IGNORE handles concurrent uploads)
3317
+
const created_at = new Date().toISOString();
3318
+
this.sql.exec(
3319
+
'INSERT OR IGNORE INTO blobs (cid, mime_type, size, created_at) VALUES (?, ?, ?, ?)',
3320
+
cidStr,
3321
+
mimeType,
3322
+
size,
3323
+
created_at,
3324
+
);
3325
+
3326
+
// Return BlobRef
3327
+
return Response.json({
3328
+
blob: {
3329
+
$type: 'blob',
3330
+
ref: { $link: cidStr },
3331
+
mimeType,
3332
+
size,
3333
+
},
3334
+
});
3335
+
}
3336
+
3337
+
/** @param {URL} url */
3338
+
async handleGetBlob(url) {
3339
+
const did = url.searchParams.get('did');
3340
+
const cid = url.searchParams.get('cid');
3341
+
3342
+
if (!did || !cid) {
3343
+
return errorResponse(
3344
+
'InvalidRequest',
3345
+
'missing did or cid parameter',
3346
+
400,
3347
+
);
3348
+
}
3349
+
3350
+
// Validate CID format (CIDv1 base32lower: starts with 'b', 59 chars total)
3351
+
if (!/^b[a-z2-7]{58}$/.test(cid)) {
3352
+
return errorResponse('InvalidRequest', 'invalid CID format', 400);
3353
+
}
3354
+
3355
+
// Verify DID matches this DO
3356
+
const myDid = await this.getDid();
3357
+
if (did !== myDid) {
3358
+
return errorResponse(
3359
+
'InvalidRequest',
3360
+
'DID does not match this repo',
3361
+
400,
3362
+
);
3363
+
}
3364
+
3365
+
// Look up blob metadata
3366
+
const rows = this.sql
3367
+
.exec('SELECT mime_type, size FROM blobs WHERE cid = ?', cid)
3368
+
.toArray();
3369
+
3370
+
if (rows.length === 0) {
3371
+
return errorResponse('BlobNotFound', 'blob not found', 404);
3372
+
}
3373
+
3374
+
const { mime_type, size } = rows[0];
3375
+
3376
+
// Fetch from R2
3377
+
const r2Key = `${did}/${cid}`;
3378
+
const object = await this.env?.BLOBS?.get(r2Key);
3379
+
3380
+
if (!object) {
3381
+
return errorResponse('BlobNotFound', 'blob not found in storage', 404);
3382
+
}
3383
+
3384
+
// Return blob with security headers
3385
+
return new Response(object.body, {
3386
+
headers: {
3387
+
'Content-Type': /** @type {string} */ (mime_type),
3388
+
'Content-Length': String(size),
3389
+
'X-Content-Type-Options': 'nosniff',
3390
+
'Content-Security-Policy': "default-src 'none'; sandbox",
3391
+
'Cache-Control': 'public, max-age=31536000, immutable',
3392
+
},
3393
+
});
3394
+
}
3395
+
3396
+
/** @param {URL} url */
3397
+
async handleListBlobs(url) {
3398
+
const did = url.searchParams.get('did');
3399
+
const cursor = url.searchParams.get('cursor');
3400
+
const limit = Math.min(Number(url.searchParams.get('limit')) || 500, 1000);
3401
+
3402
+
if (!did) {
3403
+
return errorResponse('InvalidRequest', 'missing did parameter', 400);
3404
+
}
3405
+
3406
+
// Verify DID matches this DO
3407
+
const myDid = await this.getDid();
3408
+
if (did !== myDid) {
3409
+
return errorResponse(
3410
+
'InvalidRequest',
3411
+
'DID does not match this repo',
3412
+
400,
3413
+
);
1309
3414
}
1310
-
const blocks = this.sql.exec(`SELECT cid, data FROM blocks`).toArray()
1311
-
const blocksForCar = blocks.map(b => ({
1312
-
cid: b.cid,
1313
-
data: new Uint8Array(b.data)
1314
-
}))
1315
-
const car = buildCarFile(commits[0].cid, blocksForCar)
1316
-
return new Response(car, {
1317
-
headers: { 'content-type': 'application/vnd.ipld.car' }
1318
-
})
3415
+
3416
+
// Query blobs with pagination (cursor is created_at::cid for uniqueness)
3417
+
let query = 'SELECT cid, created_at FROM blobs';
3418
+
const params = [];
3419
+
3420
+
if (cursor) {
3421
+
const [cursorTime, cursorCid] = cursor.split('::');
3422
+
query += ' WHERE (created_at > ? OR (created_at = ? AND cid > ?))';
3423
+
params.push(cursorTime, cursorTime, cursorCid);
3424
+
}
3425
+
3426
+
query += ' ORDER BY created_at ASC, cid ASC LIMIT ?';
3427
+
params.push(limit + 1); // Fetch one extra to detect if there's more
3428
+
3429
+
const rows = this.sql.exec(query, ...params).toArray();
3430
+
3431
+
// Determine if there's a next page
3432
+
let nextCursor = null;
3433
+
if (rows.length > limit) {
3434
+
rows.pop(); // Remove the extra row
3435
+
const last = rows[rows.length - 1];
3436
+
nextCursor = `${last.created_at}::${last.cid}`;
3437
+
}
3438
+
3439
+
return Response.json({
3440
+
cids: rows.map((r) => r.cid),
3441
+
cursor: nextCursor,
3442
+
});
1319
3443
}
1320
3444
3445
+
/**
3446
+
* @param {Request} request
3447
+
* @param {URL} url
3448
+
*/
1321
3449
handleSubscribeRepos(request, url) {
1322
-
const upgradeHeader = request.headers.get('Upgrade')
3450
+
const upgradeHeader = request.headers.get('Upgrade');
1323
3451
if (upgradeHeader !== 'websocket') {
1324
-
return new Response('expected websocket', { status: 426 })
3452
+
return new Response('expected websocket', { status: 426 });
1325
3453
}
1326
-
const { 0: client, 1: server } = new WebSocketPair()
1327
-
this.state.acceptWebSocket(server)
1328
-
const cursor = url.searchParams.get('cursor')
3454
+
const { 0: client, 1: server } = new WebSocketPair();
3455
+
this.state.acceptWebSocket(server);
3456
+
const cursor = url.searchParams.get('cursor');
1329
3457
if (cursor) {
1330
-
const events = this.sql.exec(
1331
-
`SELECT * FROM seq_events WHERE seq > ? ORDER BY seq`,
1332
-
parseInt(cursor)
1333
-
).toArray()
3458
+
const events = /** @type {SeqEventRow[]} */ (
3459
+
this.sql
3460
+
.exec(
3461
+
`SELECT * FROM seq_events WHERE seq > ? ORDER BY seq`,
3462
+
parseInt(cursor, 10),
3463
+
)
3464
+
.toArray()
3465
+
);
1334
3466
for (const evt of events) {
1335
-
server.send(this.formatEvent(evt))
3467
+
server.send(this.formatEvent(evt));
1336
3468
}
1337
3469
}
1338
-
return new Response(null, { status: 101, webSocket: client })
3470
+
return new Response(null, { status: 101, webSocket: client });
1339
3471
}
1340
3472
3473
+
/** @param {Request} request */
1341
3474
async fetch(request) {
1342
-
const url = new URL(request.url)
1343
-
const route = pdsRoutes[url.pathname]
3475
+
const url = new URL(request.url);
3476
+
const route = pdsRoutes[url.pathname];
3477
+
3478
+
// Check for local route first
3479
+
if (route) {
3480
+
if (route.method && request.method !== route.method) {
3481
+
return errorResponse('MethodNotAllowed', 'method not allowed', 405);
3482
+
}
3483
+
return route.handler(this, request, url);
3484
+
}
3485
+
3486
+
// Handle app.bsky.* proxy requests (only if no local route)
3487
+
if (url.pathname.startsWith('/xrpc/app.bsky.')) {
3488
+
const userDid = request.headers.get('x-authed-did');
3489
+
if (!userDid) {
3490
+
return errorResponse('Unauthorized', 'Missing auth context', 401);
3491
+
}
3492
+
return this.handleAppViewProxy(request, userDid);
3493
+
}
3494
+
3495
+
return errorResponse('NotFound', 'not found', 404);
3496
+
}
3497
+
3498
+
async alarm() {
3499
+
await this.cleanupOrphanedBlobs();
3500
+
// Reschedule for next day
3501
+
await this.state.storage.setAlarm(Date.now() + 24 * 60 * 60 * 1000);
3502
+
}
3503
+
3504
+
async cleanupOrphanedBlobs() {
3505
+
const did = await this.getDid();
3506
+
if (!did) return;
3507
+
3508
+
// Find orphans: blobs not in record_blobs, older than 24h
3509
+
const cutoff = new Date(Date.now() - 24 * 60 * 60 * 1000).toISOString();
3510
+
3511
+
const orphans = this.sql
3512
+
.exec(
3513
+
`SELECT b.cid FROM blobs b
3514
+
LEFT JOIN record_blobs rb ON b.cid = rb.blob_cid
3515
+
WHERE rb.blob_cid IS NULL AND b.created_at < ?`,
3516
+
cutoff,
3517
+
)
3518
+
.toArray();
3519
+
3520
+
for (const { cid } of orphans) {
3521
+
await this.env?.BLOBS?.delete(`${did}/${cid}`);
3522
+
this.sql.exec('DELETE FROM blobs WHERE cid = ?', cid);
3523
+
}
3524
+
}
3525
+
3526
+
// โโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโ
3527
+
// โ OAUTH HANDLERS โ
3528
+
// โ OAuth 2.0 authorization server with DPoP, PKCE, and token management โ
3529
+
// โโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโ
3530
+
3531
+
/**
3532
+
* Check if a DPoP jti has been used and mark it as used.
3533
+
* Returns true if the jti is fresh (not seen before), false if it's a replay.
3534
+
* Also cleans up expired jtis.
3535
+
* @param {string} jti - The DPoP proof jti to check
3536
+
* @param {number} iat - The iat claim from the DPoP proof (unix timestamp)
3537
+
* @returns {boolean} True if jti is fresh, false if replay
3538
+
*/
3539
+
checkAndStoreDpopJti(jti, iat) {
3540
+
// Clean up expired jtis (older than 5 minutes)
3541
+
const cutoff = new Date(Date.now() - 5 * 60 * 1000).toISOString();
3542
+
this.sql.exec(`DELETE FROM dpop_jtis WHERE expires_at < ?`, cutoff);
3543
+
3544
+
// Check if jti already exists
3545
+
const existing = this.sql
3546
+
.exec(`SELECT 1 FROM dpop_jtis WHERE jti = ?`, jti)
3547
+
.toArray();
3548
+
if (existing.length > 0) {
3549
+
return false; // Replay attack
3550
+
}
3551
+
3552
+
// Store jti with expiration (iat + 5 minutes)
3553
+
const expiresAt = new Date((iat + 300) * 1000).toISOString();
3554
+
this.sql.exec(
3555
+
`INSERT INTO dpop_jtis (jti, expires_at) VALUES (?, ?)`,
3556
+
jti,
3557
+
expiresAt,
3558
+
);
3559
+
return true;
3560
+
}
3561
+
3562
+
/**
3563
+
* Clean up expired authorization requests.
3564
+
* Should be called periodically to prevent table bloat.
3565
+
* @returns {number} Number of expired requests deleted
3566
+
*/
3567
+
cleanupExpiredAuthorizationRequests() {
3568
+
const now = new Date().toISOString();
3569
+
const result = this.sql.exec(
3570
+
`DELETE FROM authorization_requests WHERE expires_at < ?`,
3571
+
now,
3572
+
);
3573
+
return result.rowsWritten;
3574
+
}
3575
+
3576
+
/**
3577
+
* Validate a required DPoP proof header, parse it, and check for replay attacks.
3578
+
* @param {Request} request - The incoming request
3579
+
* @param {string} method - Expected HTTP method
3580
+
* @param {string} uri - Expected request URI
3581
+
* @returns {Promise<{ dpop: DpopProofResult } | { error: Response }>} The parsed DPoP proof or error response
3582
+
*/
3583
+
async validateRequiredDpop(request, method, uri) {
3584
+
const dpopHeader = request.headers.get('DPoP');
3585
+
if (!dpopHeader) {
3586
+
return {
3587
+
error: errorResponse('invalid_dpop_proof', 'DPoP proof required', 400),
3588
+
};
3589
+
}
3590
+
3591
+
let dpop;
3592
+
try {
3593
+
dpop = await parseDpopProof(dpopHeader, method, uri);
3594
+
} catch (err) {
3595
+
return { error: errorResponse('invalid_dpop_proof', err.message, 400) };
3596
+
}
3597
+
3598
+
if (!this.checkAndStoreDpopJti(dpop.jti, dpop.iat)) {
3599
+
return {
3600
+
error: errorResponse(
3601
+
'invalid_dpop_proof',
3602
+
'DPoP proof replay detected',
3603
+
400,
3604
+
),
3605
+
};
3606
+
}
3607
+
3608
+
return { dpop };
3609
+
}
3610
+
3611
+
/**
3612
+
* Get or create the OAuth signing key for this PDS instance.
3613
+
* Lazily generates a new key if one doesn't exist.
3614
+
* @returns {Promise<string>} The private key as hex string
3615
+
*/
3616
+
async getOAuthPrivateKey() {
3617
+
let privateKeyHex = /** @type {string|undefined} */ (
3618
+
await this.state.storage.get('oauthPrivateKey')
3619
+
);
3620
+
if (!privateKeyHex) {
3621
+
// Generate a new OAuth signing key
3622
+
const keyPair = await crypto.subtle.generateKey(
3623
+
{ name: 'ECDSA', namedCurve: 'P-256' },
3624
+
true,
3625
+
['sign', 'verify'],
3626
+
);
3627
+
const rawKey = await crypto.subtle.exportKey('pkcs8', keyPair.privateKey);
3628
+
// Extract the 32-byte private key from PKCS#8 (last 32 bytes after the prefix)
3629
+
const keyBytes = new Uint8Array(rawKey).slice(-32);
3630
+
privateKeyHex = bytesToHex(keyBytes);
3631
+
await this.state.storage.put('oauthPrivateKey', privateKeyHex);
3632
+
}
3633
+
return privateKeyHex;
3634
+
}
3635
+
3636
+
/**
3637
+
* Get the PDS signing key as a public JWK.
3638
+
* Exports only the public components (kty, crv, x, y) for use in JWKS.
3639
+
* @returns {Promise<{ kty: string, crv: string, x: string, y: string }>} The public key in JWK format
3640
+
* @throws {Error} If the PDS is not initialized
3641
+
*/
3642
+
async getPublicKeyJwk() {
3643
+
const privateKeyHex = await this.getOAuthPrivateKey();
3644
+
if (!privateKeyHex) throw new Error('PDS not initialized');
3645
+
3646
+
// Import key with extractable=true to export public components
3647
+
const privateKeyBytes = hexToBytes(privateKeyHex);
3648
+
const pkcs8Prefix = new Uint8Array([
3649
+
0x30, 0x41, 0x02, 0x01, 0x00, 0x30, 0x13, 0x06, 0x07, 0x2a, 0x86, 0x48,
3650
+
0xce, 0x3d, 0x02, 0x01, 0x06, 0x08, 0x2a, 0x86, 0x48, 0xce, 0x3d, 0x03,
3651
+
0x01, 0x07, 0x04, 0x27, 0x30, 0x25, 0x02, 0x01, 0x01, 0x04, 0x20,
3652
+
]);
3653
+
const pkcs8 = new Uint8Array(pkcs8Prefix.length + 32);
3654
+
pkcs8.set(pkcs8Prefix);
3655
+
pkcs8.set(privateKeyBytes, pkcs8Prefix.length);
3656
+
3657
+
const privateKey = await crypto.subtle.importKey(
3658
+
'pkcs8',
3659
+
pkcs8,
3660
+
{ name: 'ECDSA', namedCurve: 'P-256' },
3661
+
true,
3662
+
['sign'],
3663
+
);
3664
+
const jwk = await crypto.subtle.exportKey('jwk', privateKey);
3665
+
return {
3666
+
kty: /** @type {string} */ (jwk.kty),
3667
+
crv: /** @type {string} */ (jwk.crv),
3668
+
x: /** @type {string} */ (jwk.x),
3669
+
y: /** @type {string} */ (jwk.y),
3670
+
};
3671
+
}
3672
+
3673
+
/**
3674
+
* Handle OAuth Authorization Server Metadata endpoint.
3675
+
* @param {URL} url - Parsed request URL
3676
+
* @returns {Response} JSON response with OAuth AS metadata
3677
+
*/
3678
+
handleOAuthAuthServerMetadata(url) {
3679
+
const issuer = `${url.protocol}//${url.host}`;
3680
+
return Response.json({
3681
+
issuer,
3682
+
authorization_endpoint: `${issuer}/oauth/authorize`,
3683
+
token_endpoint: `${issuer}/oauth/token`,
3684
+
revocation_endpoint: `${issuer}/oauth/revoke`,
3685
+
pushed_authorization_request_endpoint: `${issuer}/oauth/par`,
3686
+
jwks_uri: `${issuer}/oauth/jwks`,
3687
+
scopes_supported: ['atproto'],
3688
+
subject_types_supported: ['public'],
3689
+
response_types_supported: ['code'],
3690
+
response_modes_supported: ['query', 'fragment'],
3691
+
grant_types_supported: ['authorization_code', 'refresh_token'],
3692
+
code_challenge_methods_supported: ['S256'],
3693
+
token_endpoint_auth_methods_supported: ['none'],
3694
+
dpop_signing_alg_values_supported: ['ES256'],
3695
+
require_pushed_authorization_requests: false,
3696
+
authorization_response_iss_parameter_supported: true,
3697
+
client_id_metadata_document_supported: true,
3698
+
protected_resources: [issuer],
3699
+
});
3700
+
}
3701
+
3702
+
/**
3703
+
* Handle OAuth Protected Resource Metadata endpoint.
3704
+
* @param {URL} url - Parsed request URL
3705
+
* @returns {Response} JSON response with OAuth PR metadata
3706
+
*/
3707
+
handleOAuthProtectedResource(url) {
3708
+
const resource = `${url.protocol}//${url.host}`;
3709
+
return Response.json({
3710
+
resource,
3711
+
authorization_servers: [resource],
3712
+
bearer_methods_supported: ['header'],
3713
+
scopes_supported: ['atproto'],
3714
+
});
3715
+
}
3716
+
3717
+
/**
3718
+
* Handle OAuth JWKS endpoint.
3719
+
* @returns {Promise<Response>} JSON response with JWKS
3720
+
*/
3721
+
async handleOAuthJwks() {
3722
+
const publicKeyJwk = await this.getPublicKeyJwk();
3723
+
return Response.json({
3724
+
keys: [
3725
+
{ ...publicKeyJwk, kid: 'pds-oauth-key', use: 'sig', alg: 'ES256' },
3726
+
],
3727
+
});
3728
+
}
3729
+
3730
+
/**
3731
+
* Validate OAuth authorization request parameters.
3732
+
* Shared between PAR and direct authorization flows.
3733
+
* @param {Object} params - The authorization parameters
3734
+
* @param {string | undefined | null} params.clientId - The client_id
3735
+
* @param {string | undefined | null} params.redirectUri - The redirect_uri
3736
+
* @param {string | undefined | null} params.responseType - The response_type
3737
+
* @param {string | undefined | null} params.codeChallenge - The code_challenge
3738
+
* @param {string | undefined | null} params.codeChallengeMethod - The code_challenge_method
3739
+
* @returns {Promise<{error: Response} | {clientMetadata: ClientMetadata}>}
3740
+
*/
3741
+
async validateAuthorizationParameters({
3742
+
clientId,
3743
+
redirectUri,
3744
+
responseType,
3745
+
codeChallenge,
3746
+
codeChallengeMethod,
3747
+
}) {
3748
+
if (!clientId) {
3749
+
return {
3750
+
error: errorResponse('invalid_request', 'client_id required', 400),
3751
+
};
3752
+
}
3753
+
if (!redirectUri) {
3754
+
return {
3755
+
error: errorResponse('invalid_request', 'redirect_uri required', 400),
3756
+
};
3757
+
}
3758
+
if (responseType !== 'code') {
3759
+
return {
3760
+
error: errorResponse(
3761
+
'unsupported_response_type',
3762
+
'response_type must be code',
3763
+
400,
3764
+
),
3765
+
};
3766
+
}
3767
+
if (!codeChallenge || codeChallengeMethod !== 'S256') {
3768
+
return {
3769
+
error: errorResponse('invalid_request', 'PKCE with S256 required', 400),
3770
+
};
3771
+
}
3772
+
3773
+
let clientMetadata;
3774
+
try {
3775
+
clientMetadata = await getClientMetadata(clientId);
3776
+
} catch (err) {
3777
+
return { error: errorResponse('invalid_client', err.message, 400) };
3778
+
}
3779
+
3780
+
// Validate redirect_uri against registered URIs
3781
+
const isLoopback =
3782
+
clientId.startsWith('http://localhost') ||
3783
+
clientId.startsWith('http://127.0.0.1');
3784
+
const redirectUriValid = clientMetadata.redirect_uris.some((uri) => {
3785
+
if (isLoopback) {
3786
+
try {
3787
+
const registered = new URL(uri);
3788
+
const requested = new URL(redirectUri);
3789
+
return registered.origin === requested.origin;
3790
+
} catch {
3791
+
return false;
3792
+
}
3793
+
}
3794
+
return uri === redirectUri;
3795
+
});
3796
+
if (!redirectUriValid) {
3797
+
return {
3798
+
error: errorResponse(
3799
+
'invalid_request',
3800
+
'redirect_uri not registered for this client',
3801
+
400,
3802
+
),
3803
+
};
3804
+
}
3805
+
3806
+
return { clientMetadata };
3807
+
}
3808
+
3809
+
/**
3810
+
* Handle Pushed Authorization Request (PAR) endpoint.
3811
+
* Validates DPoP proof, client metadata, PKCE parameters, and stores the authorization request.
3812
+
* @param {Request} request - The incoming request
3813
+
* @param {URL} url - Parsed request URL
3814
+
* @returns {Promise<Response>} JSON response with request_uri and expires_in
3815
+
*/
3816
+
async handleOAuthPar(request, url) {
3817
+
// Opportunistically clean up expired authorization requests
3818
+
this.cleanupExpiredAuthorizationRequests();
3819
+
3820
+
const issuer = `${url.protocol}//${url.host}`;
3821
+
3822
+
const dpopResult = await this.validateRequiredDpop(
3823
+
request,
3824
+
'POST',
3825
+
`${issuer}/oauth/par`,
3826
+
);
3827
+
if ('error' in dpopResult) return dpopResult.error;
3828
+
const { dpop } = dpopResult;
3829
+
3830
+
// Parse body - support both JSON and form-encoded
3831
+
/** @type {Record<string, string|undefined>} */
3832
+
let data;
3833
+
try {
3834
+
data = await parseRequestBody(request);
3835
+
} catch {
3836
+
return errorResponse('invalid_request', 'Invalid JSON body', 400);
3837
+
}
3838
+
3839
+
const clientId = data.client_id;
3840
+
const redirectUri = data.redirect_uri;
3841
+
const responseType = data.response_type;
3842
+
const responseMode = data.response_mode;
3843
+
const scope = data.scope;
3844
+
const state = data.state;
3845
+
const codeChallenge = data.code_challenge;
3846
+
const codeChallengeMethod = data.code_challenge_method;
3847
+
const loginHint = data.login_hint;
3848
+
3849
+
// Use shared validation
3850
+
const validationResult = await this.validateAuthorizationParameters({
3851
+
clientId,
3852
+
redirectUri,
3853
+
responseType,
3854
+
codeChallenge,
3855
+
codeChallengeMethod,
3856
+
});
3857
+
if ('error' in validationResult) return validationResult.error;
3858
+
const { clientMetadata } = validationResult;
3859
+
3860
+
const requestId = crypto.randomUUID();
3861
+
const requestUri = `urn:ietf:params:oauth:request_uri:${requestId}`;
3862
+
const expiresIn = 600;
3863
+
const expiresAt = new Date(Date.now() + expiresIn * 1000).toISOString();
3864
+
3865
+
this.sql.exec(
3866
+
`INSERT INTO authorization_requests (
3867
+
id, client_id, client_metadata, parameters,
3868
+
code_challenge, code_challenge_method, dpop_jkt,
3869
+
expires_at, created_at
3870
+
) VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?)`,
3871
+
requestId,
3872
+
clientId,
3873
+
JSON.stringify(clientMetadata),
3874
+
JSON.stringify({
3875
+
redirect_uri: redirectUri,
3876
+
scope,
3877
+
state,
3878
+
response_mode: responseMode,
3879
+
login_hint: loginHint,
3880
+
}),
3881
+
codeChallenge,
3882
+
codeChallengeMethod,
3883
+
dpop.jkt,
3884
+
expiresAt,
3885
+
new Date().toISOString(),
3886
+
);
3887
+
3888
+
return Response.json({ request_uri: requestUri, expires_in: expiresIn });
3889
+
}
3890
+
3891
+
/**
3892
+
* Handle OAuth Authorize endpoint - displays consent UI (GET) or processes approval (POST).
3893
+
* @param {Request} request - The incoming request
3894
+
* @param {URL} url - Parsed request URL
3895
+
* @returns {Promise<Response>} HTML consent page or redirect
3896
+
*/
3897
+
async handleOAuthAuthorize(request, url) {
3898
+
if (request.method === 'GET') {
3899
+
return this.handleOAuthAuthorizeGet(url);
3900
+
} else if (request.method === 'POST') {
3901
+
return this.handleOAuthAuthorizePost(request, url);
3902
+
}
3903
+
return errorResponse('MethodNotAllowed', 'Method not allowed', 405);
3904
+
}
3905
+
3906
+
/**
3907
+
* Handle GET /oauth/authorize - displays the consent UI.
3908
+
* Supports both PAR (request_uri) and direct authorization parameters.
3909
+
* @param {URL} url - Parsed request URL
3910
+
* @returns {Promise<Response>} HTML consent page
3911
+
*/
3912
+
  async handleOAuthAuthorizeGet(url) {
    // Opportunistically clean up expired authorization requests
    this.cleanupExpiredAuthorizationRequests();

    const requestUri = url.searchParams.get('request_uri');
    const clientId = url.searchParams.get('client_id');

    // If request_uri is present, use PAR flow: look up the previously pushed
    // request instead of accepting parameters from the query string.
    if (requestUri) {
      if (!clientId) {
        return new Response('Missing client_id parameter', { status: 400 });
      }

      // request_uri is an opaque URN; the trailing segment is our row id.
      const match = requestUri.match(
        /^urn:ietf:params:oauth:request_uri:(.+)$/,
      );
      if (!match) return new Response('Invalid request_uri', { status: 400 });

      // Scoping the lookup by client_id prevents one client from consuming
      // another client's pushed request.
      const rows = this.sql
        .exec(
          `SELECT * FROM authorization_requests WHERE id = ? AND client_id = ?`,
          match[1],
          clientId,
        )
        .toArray();
      const authRequest = rows[0];

      if (!authRequest)
        return new Response('Request not found', { status: 400 });
      // Expired requests are rejected here even if cleanup hasn't run yet.
      if (new Date(/** @type {string} */ (authRequest.expires_at)) < new Date())
        return new Response('Request expired', { status: 400 });
      // A non-null code means consent was already granted; the request is
      // single-use.
      if (authRequest.code)
        return new Response('Request already used', { status: 400 });

      const clientMetadata = JSON.parse(
        /** @type {string} */ (authRequest.client_metadata),
      );
      const parameters = JSON.parse(
        /** @type {string} */ (authRequest.parameters),
      );

      // Render the consent UI; login_hint lets the page show the profile
      // card for the authorizing user.
      return new Response(
        renderConsentPage({
          clientName: clientMetadata.client_name || clientId,
          clientId: clientId || '',
          scope: parameters.scope || 'atproto',
          requestUri: requestUri || '',
          loginHint: parameters.login_hint || '',
        }),
        {
          status: 200,
          headers: { 'Content-Type': 'text/html; charset=utf-8' },
        },
      );
    }

    // Direct authorization flow - create request on-the-fly
    if (!clientId) {
      return new Response('Missing client_id parameter', { status: 400 });
    }

    const redirectUri = url.searchParams.get('redirect_uri');
    const responseType = url.searchParams.get('response_type');
    const responseMode = url.searchParams.get('response_mode');
    const scope = url.searchParams.get('scope');
    const state = url.searchParams.get('state');
    const codeChallenge = url.searchParams.get('code_challenge');
    const codeChallengeMethod = url.searchParams.get('code_challenge_method');
    const loginHint = url.searchParams.get('login_hint');

    // Validate parameters using shared helper
    const validationResult = await this.validateAuthorizationParameters({
      clientId,
      redirectUri,
      responseType,
      codeChallenge,
      codeChallengeMethod,
    });
    if ('error' in validationResult) return validationResult.error;
    const { clientMetadata } = validationResult;

    // Create authorization request record (same as PAR but without DPoP)
    const requestId = crypto.randomUUID();
    const newRequestUri = `urn:ietf:params:oauth:request_uri:${requestId}`;
    const expiresIn = 600;
    const expiresAt = new Date(Date.now() + expiresIn * 1000).toISOString();

    this.sql.exec(
      `INSERT INTO authorization_requests (
        id, client_id, client_metadata, parameters,
        code_challenge, code_challenge_method, dpop_jkt,
        expires_at, created_at
      ) VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?)`,
      requestId,
      clientId,
      JSON.stringify(clientMetadata),
      JSON.stringify({
        redirect_uri: redirectUri,
        scope,
        state,
        response_mode: responseMode,
        login_hint: loginHint,
      }),
      codeChallenge,
      codeChallengeMethod,
      null, // No DPoP for direct authorization - will be bound at token exchange
      expiresAt,
      new Date().toISOString(),
    );

    return new Response(
      renderConsentPage({
        clientName: clientMetadata.client_name || clientId,
        clientId: clientId,
        scope: scope || 'atproto',
        requestUri: newRequestUri,
        loginHint: loginHint || '',
      }),
      { status: 200, headers: { 'Content-Type': 'text/html; charset=utf-8' } },
    );
  }
4033
+
4034
+
/**
4035
+
* Handle POST /oauth/authorize - processes user approval/denial.
4036
+
* Validates password, generates authorization code on approval, redirects to client.
4037
+
* @param {Request} request - The incoming request
4038
+
* @param {URL} url - Parsed request URL
4039
+
* @returns {Promise<Response>} Redirect to client redirect_uri with code or error
4040
+
*/
4041
+
async handleOAuthAuthorizePost(request, url) {
4042
+
const issuer = `${url.protocol}//${url.host}`;
4043
+
const body = await request.text();
4044
+
const params = new URLSearchParams(body);
4045
+
4046
+
const requestUri = params.get('request_uri');
4047
+
const clientId = params.get('client_id');
4048
+
const password = params.get('password');
4049
+
const action = params.get('action');
4050
+
4051
+
const match = requestUri?.match(/^urn:ietf:params:oauth:request_uri:(.+)$/);
4052
+
if (!match) return new Response('Invalid request_uri', { status: 400 });
4053
+
4054
+
const authRows = this.sql
4055
+
.exec(
4056
+
`SELECT * FROM authorization_requests WHERE id = ? AND client_id = ?`,
4057
+
match[1],
4058
+
clientId,
4059
+
)
4060
+
.toArray();
4061
+
const authRequest = authRows[0];
4062
+
if (!authRequest) return new Response('Request not found', { status: 400 });
4063
+
4064
+
const clientMetadata = JSON.parse(
4065
+
/** @type {string} */ (authRequest.client_metadata),
4066
+
);
4067
+
const parameters = JSON.parse(
4068
+
/** @type {string} */ (authRequest.parameters),
4069
+
);
4070
+
4071
+
if (action === 'deny') {
4072
+
this.sql.exec(
4073
+
`DELETE FROM authorization_requests WHERE id = ?`,
4074
+
match[1],
4075
+
);
4076
+
const errorUrl = new URL(parameters.redirect_uri);
4077
+
errorUrl.searchParams.set('error', 'access_denied');
4078
+
if (parameters.state)
4079
+
errorUrl.searchParams.set('state', parameters.state);
4080
+
errorUrl.searchParams.set('iss', issuer);
4081
+
return Response.redirect(errorUrl.toString(), 302);
4082
+
}
4083
+
4084
+
// Timing-safe password comparison
4085
+
const expectedPwd = this.env?.PDS_PASSWORD;
4086
+
const passwordValid =
4087
+
password && expectedPwd && (await timingSafeEqual(password, expectedPwd));
4088
+
if (!passwordValid) {
4089
+
return new Response(
4090
+
renderConsentPage({
4091
+
clientName: clientMetadata.client_name || clientId,
4092
+
clientId: clientId || '',
4093
+
scope: parameters.scope || 'atproto',
4094
+
requestUri: requestUri || '',
4095
+
error: 'Invalid password',
4096
+
}),
4097
+
{
4098
+
status: 200,
4099
+
headers: { 'Content-Type': 'text/html; charset=utf-8' },
4100
+
},
4101
+
);
4102
+
}
4103
+
4104
+
const code = base64UrlEncode(crypto.getRandomValues(new Uint8Array(32)));
4105
+
4106
+
// Resolve DID from login_hint (can be handle or DID)
4107
+
let did = parameters.login_hint;
4108
+
if (did && !did.startsWith('did:')) {
4109
+
// It's a handle, resolve to DID
4110
+
const handleMap = /** @type {Record<string, string>} */ (
4111
+
(await this.state.storage.get('handleMap')) || {}
4112
+
);
4113
+
did = handleMap[did];
4114
+
}
4115
+
if (!did) {
4116
+
return new Response('Could not resolve user', { status: 400 });
4117
+
}
4118
+
4119
+
this.sql.exec(
4120
+
`UPDATE authorization_requests SET code = ?, did = ? WHERE id = ?`,
4121
+
code,
4122
+
did,
4123
+
match[1],
4124
+
);
4125
+
4126
+
const successUrl = new URL(parameters.redirect_uri);
4127
+
if (parameters.response_mode === 'fragment') {
4128
+
const fragParams = new URLSearchParams();
4129
+
fragParams.set('code', code);
4130
+
if (parameters.state) fragParams.set('state', parameters.state);
4131
+
fragParams.set('iss', issuer);
4132
+
successUrl.hash = fragParams.toString();
4133
+
} else {
4134
+
successUrl.searchParams.set('code', code);
4135
+
if (parameters.state)
4136
+
successUrl.searchParams.set('state', parameters.state);
4137
+
successUrl.searchParams.set('iss', issuer);
4138
+
}
4139
+
return Response.redirect(successUrl.toString(), 302);
4140
+
}
4141
+
4142
+
/**
4143
+
* Handle token endpoint - exchanges authorization codes for tokens.
4144
+
* Supports authorization_code and refresh_token grant types.
4145
+
* @param {Request} request - The incoming request
4146
+
* @param {URL} url - Parsed request URL
4147
+
* @returns {Promise<Response>} JSON response with access_token, token_type, expires_in, refresh_token, scope
4148
+
*/
4149
+
async handleOAuthToken(request, url) {
4150
+
const issuer = `${url.protocol}//${url.host}`;
4151
+
4152
+
const dpopResult = await this.validateRequiredDpop(
4153
+
request,
4154
+
'POST',
4155
+
`${issuer}/oauth/token`,
4156
+
);
4157
+
if ('error' in dpopResult) return dpopResult.error;
4158
+
const { dpop } = dpopResult;
4159
+
4160
+
const contentType = request.headers.get('content-type') || '';
4161
+
const body = await request.text();
4162
+
/** @type {URLSearchParams | Map<string, string>} */
4163
+
let params;
4164
+
if (contentType.includes('application/json')) {
4165
+
try {
4166
+
const json = JSON.parse(body);
4167
+
params = new Map(Object.entries(json));
4168
+
} catch {
4169
+
return errorResponse('invalid_request', 'Invalid JSON body', 400);
4170
+
}
4171
+
} else {
4172
+
params = new URLSearchParams(body);
4173
+
}
4174
+
const grantType = params.get('grant_type');
4175
+
4176
+
if (grantType === 'authorization_code') {
4177
+
return this.handleAuthCodeGrant(params, dpop, issuer);
4178
+
} else if (grantType === 'refresh_token') {
4179
+
return this.handleRefreshGrant(params, dpop, issuer);
4180
+
}
4181
+
return errorResponse(
4182
+
'unsupported_grant_type',
4183
+
'Grant type not supported',
4184
+
400,
4185
+
);
4186
+
}
4187
+
4188
+
/**
4189
+
* Handle authorization_code grant type.
4190
+
* Validates the code, PKCE verifier, and DPoP binding, then issues tokens.
4191
+
* @param {URLSearchParams | Map<string, string>} params - Token request parameters
4192
+
* @param {DpopProofResult} dpop - Parsed DPoP proof
4193
+
* @param {string} issuer - The PDS issuer URL
4194
+
* @returns {Promise<Response>} JSON token response
4195
+
*/
4196
+
  async handleAuthCodeGrant(params, dpop, issuer) {
    const code = params.get('code');
    const redirectUri = params.get('redirect_uri');
    const clientId = params.get('client_id');
    const codeVerifier = params.get('code_verifier');

    if (!code || !redirectUri || !clientId || !codeVerifier) {
      return errorResponse(
        'invalid_request',
        'Missing required parameters',
        400,
      );
    }

    // Look the pending request up by the authorization code issued at
    // consent time.
    const authRows = this.sql
      .exec(`SELECT * FROM authorization_requests WHERE code = ?`, code)
      .toArray();
    const authRequest = authRows[0];
    if (!authRequest)
      return errorResponse('invalid_grant', 'Invalid code', 400);
    // The code may only be redeemed by the client it was issued to.
    if (authRequest.client_id !== clientId)
      return errorResponse('invalid_grant', 'Client mismatch', 400);
    // For PAR flow, dpop_jkt is set at PAR time and must match
    // For direct authorization, dpop_jkt is null and we bind to the token request's DPoP
    if (authRequest.dpop_jkt !== null && authRequest.dpop_jkt !== dpop.jkt) {
      return errorResponse('invalid_dpop_proof', 'DPoP key mismatch', 400);
    }

    const parameters = JSON.parse(
      /** @type {string} */ (authRequest.parameters),
    );
    // redirect_uri must be replayed exactly as authorized.
    if (parameters.redirect_uri !== redirectUri)
      return errorResponse('invalid_grant', 'Redirect URI mismatch', 400);

    // Verify PKCE: S256(code_verifier) must equal the stored code_challenge.
    const challengeHash = await crypto.subtle.digest(
      'SHA-256',
      new TextEncoder().encode(codeVerifier),
    );
    const computedChallenge = base64UrlEncode(new Uint8Array(challengeHash));
    if (computedChallenge !== authRequest.code_challenge) {
      return errorResponse('invalid_grant', 'Invalid code_verifier', 400);
    }

    // The code is single-use: delete the request before issuing tokens.
    this.sql.exec(
      `DELETE FROM authorization_requests WHERE id = ?`,
      authRequest.id,
    );

    const tokenId = crypto.randomUUID();
    const refreshToken = base64UrlEncode(
      crypto.getRandomValues(new Uint8Array(32)),
    );
    const scope = parameters.scope || 'atproto';
    const now = new Date();
    // Access tokens live for 1 hour.
    const expiresIn = 3600;
    const subjectDid = /** @type {string} */ (authRequest.did);

    // Mint the DPoP-bound JWT access token; cnf.jkt pins the client's key.
    const accessToken = await this.createOAuthAccessToken({
      issuer,
      subject: subjectDid,
      clientId,
      scope,
      tokenId,
      dpopJkt: dpop.jkt,
      expiresIn,
    });

    // Record the session so refresh and revocation can find it later.
    this.sql.exec(
      `INSERT INTO tokens (token_id, did, client_id, scope, dpop_jkt, expires_at, refresh_token, created_at, updated_at)
       VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?)`,
      tokenId,
      subjectDid,
      clientId,
      scope,
      dpop.jkt,
      new Date(now.getTime() + expiresIn * 1000).toISOString(),
      refreshToken,
      now.toISOString(),
      now.toISOString(),
    );

    return Response.json({
      access_token: accessToken,
      token_type: 'DPoP',
      expires_in: expiresIn,
      refresh_token: refreshToken,
      scope,
      sub: subjectDid,
    });
  }
4287
+
4288
+
/**
4289
+
* Handle refresh_token grant type.
4290
+
* Validates the refresh token, DPoP binding, and 24hr lifetime, then rotates tokens.
4291
+
* @param {URLSearchParams | Map<string, string>} params - Token request parameters
4292
+
* @param {DpopProofResult} dpop - Parsed DPoP proof
4293
+
* @param {string} issuer - The PDS issuer URL
4294
+
* @returns {Promise<Response>} JSON token response with new access and refresh tokens
4295
+
*/
4296
+
  async handleRefreshGrant(params, dpop, issuer) {
    const refreshToken = params.get('refresh_token');
    const clientId = params.get('client_id');

    if (!refreshToken || !clientId)
      return errorResponse(
        'invalid_request',
        'Missing required parameters',
        400,
      );

    const tokenRows = this.sql
      .exec(`SELECT * FROM tokens WHERE refresh_token = ?`, refreshToken)
      .toArray();
    const token = tokenRows[0];

    if (!token)
      return errorResponse('invalid_grant', 'Invalid refresh token', 400);
    // Refresh tokens are bound to the issuing client and DPoP key.
    if (token.client_id !== clientId)
      return errorResponse('invalid_grant', 'Client mismatch', 400);
    if (token.dpop_jkt !== dpop.jkt)
      return errorResponse('invalid_dpop_proof', 'DPoP key mismatch', 400);

    // Check 24hr lifetime
    // NOTE(review): created_at is never rotated below, so this is an
    // ABSOLUTE 24h session lifetime measured from the original grant, not a
    // sliding window per refresh — confirm this is the intended policy.
    const createdAt = new Date(/** @type {string} */ (token.created_at));
    if (Date.now() - createdAt.getTime() > 24 * 60 * 60 * 1000) {
      this.sql.exec(`DELETE FROM tokens WHERE token_id = ?`, token.token_id);
      return errorResponse('invalid_grant', 'Refresh token expired', 400);
    }

    // Rotate: a new token_id and refresh_token replace the old pair.
    const newTokenId = crypto.randomUUID();
    const newRefreshToken = base64UrlEncode(
      crypto.getRandomValues(new Uint8Array(32)),
    );
    const now = new Date();
    const expiresIn = 3600;
    const tokenDid = /** @type {string} */ (token.did);
    const tokenScope = /** @type {string} */ (token.scope);

    const accessToken = await this.createOAuthAccessToken({
      issuer,
      subject: tokenDid,
      clientId,
      scope: tokenScope,
      tokenId: newTokenId,
      dpopJkt: dpop.jkt,
      expiresIn,
    });

    // In-place rotation keyed on the old token_id; the old refresh token is
    // invalidated by being overwritten.
    this.sql.exec(
      `UPDATE tokens SET token_id = ?, refresh_token = ?, expires_at = ?, updated_at = ? WHERE token_id = ?`,
      newTokenId,
      newRefreshToken,
      new Date(now.getTime() + expiresIn * 1000).toISOString(),
      now.toISOString(),
      token.token_id,
    );

    return Response.json({
      access_token: accessToken,
      token_type: 'DPoP',
      expires_in: expiresIn,
      refresh_token: newRefreshToken,
      scope: tokenScope,
      sub: tokenDid,
    });
  }
4363
+
4364
+
  /**
   * Create a DPoP-bound access token (at+jwt).
   * Builds and signs the JWT manually (header.payload.signature) using the
   * PDS's ES256 OAuth key; the cnf.jkt claim binds the token to the client's
   * DPoP key thumbprint.
   * @param {AccessTokenParams} params
   * @returns {Promise<string>} The signed JWT access token
   */
  async createOAuthAccessToken({
    issuer,
    subject,
    clientId,
    scope,
    tokenId,
    dpopJkt,
    expiresIn,
  }) {
    const now = Math.floor(Date.now() / 1000);
    // typ "at+jwt" marks this as an OAuth access token (RFC 9068 profile)
    const header = { typ: 'at+jwt', alg: 'ES256', kid: 'pds-oauth-key' };
    const payload = {
      iss: issuer,
      sub: subject,
      aud: issuer, // the PDS is both issuer and audience
      client_id: clientId,
      scope,
      jti: tokenId, // matches the tokens-table token_id for revocation lookup
      iat: now,
      exp: now + expiresIn,
      cnf: { jkt: dpopJkt }, // DPoP key binding (RFC 9449)
    };

    const privateKeyHex = await this.getOAuthPrivateKey();
    const privateKey = await importPrivateKey(hexToBytes(privateKeyHex));

    const headerB64 = base64UrlEncode(
      new TextEncoder().encode(JSON.stringify(header)),
    );
    const payloadB64 = base64UrlEncode(
      new TextEncoder().encode(JSON.stringify(payload)),
    );
    // Signature input is the standard JWS signing string: "header.payload"
    const sigInput = new TextEncoder().encode(`${headerB64}.${payloadB64}`);
    const sig = await sign(privateKey, sigInput);

    return `${headerB64}.${payloadB64}.${base64UrlEncode(sig)}`;
  }
4406
+
4407
+
  /**
   * Handle token revocation endpoint (RFC 7009).
   * Revokes access tokens and refresh tokens by client_id. The presented
   * token value is matched against either the refresh_token or the token_id
   * (access-token jti) column. Per RFC 7009 the endpoint responds 200 even
   * when nothing matched; the DELETE is best-effort.
   * @param {Request} request - The incoming request
   * @param {URL} url - Parsed request URL
   * @returns {Promise<Response>} Empty 200 response on success
   */
  async handleOAuthRevoke(request, url) {
    const issuer = `${url.protocol}//${url.host}`;

    // Optional DPoP verification - if present, verify it
    const dpopHeader = request.headers.get('DPoP');
    if (dpopHeader) {
      try {
        const dpop = await parseDpopProof(
          dpopHeader,
          'POST',
          `${issuer}/oauth/revoke`,
        );
        // Check for DPoP replay attack
        if (!this.checkAndStoreDpopJti(dpop.jti, dpop.iat)) {
          return errorResponse(
            'invalid_dpop_proof',
            'DPoP proof replay detected',
            400,
          );
        }
      } catch (err) {
        return errorResponse('invalid_dpop_proof', err.message, 400);
      }
    }

    /** @type {Record<string, string>} */
    let data;
    try {
      data = await parseRequestBody(request);
    } catch {
      return errorResponse('invalid_request', 'Invalid JSON body', 400);
    }

    const validation = validateRequiredParams(data, ['token', 'client_id']);
    if (!validation.valid) {
      return errorResponse(
        'invalid_request',
        'Missing required parameters',
        400,
      );
    }
    const { token, client_id: clientId } = data;

    // Scoped to client_id so one client cannot revoke another client's tokens
    this.sql.exec(
      `DELETE FROM tokens WHERE client_id = ? AND (refresh_token = ? OR token_id = ?)`,
      clientId,
      token,
      token,
    );

    return new Response(null, { status: 200 });
  }
4466
+
}
4467
+
4468
+
// ────────────────────────────────────────────────────────────────────────────────
// │ WORKERS ENTRY POINT                                                          │
// │ Request handling, CORS, auth middleware                                      │
// ────────────────────────────────────────────────────────────────────────────────
4472
+
4473
+
// CORS headers applied to every non-WebSocket response. The origin is
// wildcarded because AT Protocol clients are browser apps served from
// arbitrary origins; the allow-list includes the atproto-specific headers
// (DPoP, atproto-proxy, labeler preferences) that clients send.
const corsHeaders = {
  'Access-Control-Allow-Origin': '*',
  'Access-Control-Allow-Methods': 'GET, POST, OPTIONS',
  'Access-Control-Allow-Headers':
    'Content-Type, Authorization, DPoP, atproto-accept-labelers, atproto-proxy, x-bsky-topics',
};
4479
+
4480
+
/**
 * Return a copy of `response` with the shared CORS headers merged in.
 * Copies the headers and rebuilds the Response rather than mutating the
 * one passed in.
 * @param {Response} response
 * @returns {Response}
 */
function addCorsHeaders(response) {
  const merged = new Headers(response.headers);
  Object.entries(corsHeaders).forEach(([name, value]) => merged.set(name, value));
  return new Response(response.body, {
    status: response.status,
    statusText: response.statusText,
    headers: merged,
  });
}
1354
4495
1355
4496
// Workers entry point: CORS preflight short-circuit, request dispatch via
// handleRequest, then CORS header injection on every non-WebSocket response.
export default {
  /**
   * @param {Request} request
   * @param {Env} env
   */
  async fetch(request, env) {
    // Handle CORS preflight
    if (request.method === 'OPTIONS') {
      return new Response(null, { headers: corsHeaders });
    }

    const response = await handleRequest(request, env);
    // Don't wrap WebSocket upgrades - they need the webSocket property preserved
    if (response.status === 101) {
      return response;
    }
    return addCorsHeaders(response);
  },
};
4515
+
4516
+
/**
 * Extract subdomain from hostname (e.g., "alice" from "alice.foo.workers.dev")
 * workers.dev hostnames have the shape
 * [subdomain...].[worker-name].[account].workers.dev, i.e. four trailing
 * labels belong to the worker itself; anything before them is the user
 * subdomain. Custom domains are assumed to carry no subdomain for now.
 * @param {string} hostname
 * @returns {string|null}
 */
function getSubdomain(hostname) {
  const labels = hostname.split('.');
  const onWorkersDev = labels.slice(-2).join('.') === 'workers.dev';
  if (onWorkersDev && labels.length > 4) {
    return labels.slice(0, -4).join('.');
  }
  return null;
}
4532
+
4533
+
/**
 * Verify auth and return DID from token.
 * Supports both legacy Bearer tokens (JWT with symmetric key) and OAuth DPoP tokens.
 * Returns a discriminated result: callers check `'error' in result` and
 * return the prepared error Response directly.
 * @param {Request} request - HTTP request with Authorization header
 * @param {Env} env - Environment with JWT_SECRET
 * @param {{ fetch: (req: Request) => Promise<Response> }} [pds] - PDS stub for OAuth token verification (optional)
 * @returns {Promise<{did: string, scope?: string} | {error: Response}>} DID (and scope for OAuth) or error response
 */
async function requireAuth(request, env, pds = undefined) {
  const authHeader = request.headers.get('Authorization');
  if (!authHeader) {
    return {
      error: errorResponse('AuthRequired', 'Authentication required', 401),
    };
  }

  // Legacy Bearer token (symmetric JWT)
  if (authHeader.startsWith('Bearer ')) {
    const token = authHeader.slice(7); // strip "Bearer " prefix
    const jwtSecret = env?.JWT_SECRET;
    if (!jwtSecret) {
      return {
        error: errorResponse(
          'InternalServerError',
          'Server not configured for authentication',
          500,
        ),
      };
    }

    try {
      const payload = await verifyAccessJwt(token, jwtSecret);
      // Legacy tokens carry no scope; downstream treats that as full trust
      return { did: payload.sub };
    } catch (err) {
      const message = err instanceof Error ? err.message : String(err);
      return { error: errorResponse('InvalidToken', message, 401) };
    }
  }

  // OAuth DPoP token
  if (authHeader.startsWith('DPoP ')) {
    // DPoP verification needs the PDS DO (public key + jti replay store);
    // endpoints that don't pass a stub cannot accept DPoP tokens.
    if (!pds) {
      return {
        error: errorResponse(
          'InternalServerError',
          'DPoP tokens not supported on this endpoint',
          500,
        ),
      };
    }

    try {
      const result = await verifyOAuthAccessToken(
        request,
        authHeader.slice(5), // strip "DPoP " prefix
        pds,
      );
      return result;
    } catch (err) {
      const message = err instanceof Error ? err.message : String(err);
      return { error: errorResponse('InvalidToken', message, 401) };
    }
  }

  return {
    error: errorResponse('AuthRequired', 'Invalid authorization type', 401),
  };
}
4601
+
4602
+
/**
 * Verify an OAuth DPoP-bound access token.
 * Validates the JWT signature, expiration, DPoP binding, and proof.
 * @param {Request} request - The incoming request (for DPoP validation)
 * @param {string} token - The access token JWT
 * @param {{ fetch: (req: Request) => Promise<Response> }} pdsStub - The PDS stub with fetch method
 * @returns {Promise<{did: string, scope?: string}>} The authenticated user's DID and scope
 * @throws {Error} If verification fails
 */
async function verifyOAuthAccessToken(request, token, pdsStub) {
  const parts = token.split('.');
  if (parts.length !== 3) throw new Error('Invalid token format');

  // Reject anything that isn't an access token ("at+jwt" header type)
  const header = JSON.parse(
    new TextDecoder().decode(base64UrlDecode(parts[0])),
  );
  if (header.typ !== 'at+jwt') throw new Error('Invalid token type');

  // Verify signature with PDS public key (fetch from DO)
  const keyRes = await pdsStub.fetch(
    new Request('http://internal/oauth-public-key'),
  );
  const publicKeyJwk = await keyRes.json();
  const publicKey = await crypto.subtle.importKey(
    'jwk',
    publicKeyJwk,
    { name: 'ECDSA', namedCurve: 'P-256' },
    false,
    ['verify'],
  );

  const signatureInput = new TextEncoder().encode(`${parts[0]}.${parts[1]}`);
  const signature = base64UrlDecode(parts[2]);

  const valid = await crypto.subtle.verify(
    { name: 'ECDSA', hash: 'SHA-256' },
    publicKey,
    /** @type {BufferSource} */ (signature),
    /** @type {BufferSource} */ (signatureInput),
  );
  if (!valid) throw new Error('Invalid token signature');

  // Only trust claims after the signature has checked out
  const payload = JSON.parse(
    new TextDecoder().decode(base64UrlDecode(parts[1])),
  );

  if (payload.exp && payload.exp < Math.floor(Date.now() / 1000)) {
    throw new Error('Token expired');
  }

  // cnf.jkt is the DPoP key thumbprint the token was bound to at issuance
  if (!payload.cnf?.jkt) throw new Error('Token missing DPoP binding');

  const dpopHeader = request.headers.get('DPoP');
  if (!dpopHeader) throw new Error('DPoP proof required');

  // Proof of possession: the DPoP proof must match this method/URL and the
  // bound key thumbprint; the token itself is also passed in (presumably for
  // the ath hash check — see parseDpopProof).
  const url = new URL(request.url);
  const dpop = await parseDpopProof(
    dpopHeader,
    request.method,
    `${url.protocol}//${url.host}${url.pathname}`,
    payload.cnf.jkt,
    token,
  );

  // Check for DPoP jti replay (replay store lives in the PDS DO)
  const jtiRes = await pdsStub.fetch(
    new Request('http://internal/check-dpop-jti', {
      method: 'POST',
      body: JSON.stringify({ jti: dpop.jti, iat: dpop.iat }),
    }),
  );
  const { fresh } = await jtiRes.json();
  if (!fresh) throw new Error('DPoP proof replay detected');

  return { did: payload.sub, scope: payload.scope };
}
4678
+
4679
+
// ────────────────────────────────────────────────────────────────────────────────
// │ SCOPES                                                                       │
// │ OAuth scope parsing and permission checking                                  │
// ────────────────────────────────────────────────────────────────────────────────
4683
+
4684
+
/**
 * Parse a repo scope string into collection and actions.
 * Official format: repo:collection?action=create&action=update
 * Or: repo?collection=foo&action=create
 * Without actions defaults to all: create, update, delete
 * @param {string} scope - The scope string to parse
 * @returns {{ collection: string, actions: string[] } | null} Parsed scope or null if invalid
 */
export function parseRepoScope(scope) {
  const ALL_ACTIONS = ['create', 'update', 'delete'];

  if (!scope.startsWith('repo:') && !scope.startsWith('repo?')) return null;

  const qIdx = scope.indexOf('?');
  let collection;
  let requested;

  if (qIdx === -1) {
    // Bare "repo:collection" — no query string means every action is granted
    collection = scope.slice(5);
    requested = ALL_ACTIONS;
  } else {
    const query = new URLSearchParams(scope.slice(qIdx + 1));
    // Collection comes from the path segment when present, otherwise from
    // the ?collection= parameter.
    const pathPart = scope.startsWith('repo:') ? scope.slice(5, qIdx) : '';
    collection = pathPart || query.get('collection');
    requested = query.getAll('action');
    if (requested.length === 0) requested = ALL_ACTIONS;
  }

  if (!collection) return null;

  // Dedupe and keep only recognized actions
  const actions = [...new Set(requested)].filter((a) =>
    ALL_ACTIONS.includes(a),
  );
  if (actions.length === 0) return null;

  return { collection, actions };
}
4727
+
4728
+
/**
 * Parse a blob scope string into its components.
 * Format: blob:<mime>[,<mime>...]
 * @param {string} scope - The scope string to parse
 * @returns {{ accept: string[] } | null} Parsed scope or null if invalid
 */
export function parseBlobScope(scope) {
  if (!scope.startsWith('blob:')) return null;

  // Comma-separated MIME list after the "blob:" prefix; blank entries dropped
  const accept = scope.slice(5).split(',').filter(Boolean);

  return accept.length > 0 ? { accept } : null;
}
4745
+
4746
+
/**
 * Check if a MIME pattern matches an actual MIME type.
 * Comparison is case-insensitive; "*\/*" matches everything and "type/*"
 * matches any subtype of that type.
 * @param {string} pattern - MIME pattern (e.g., 'image/\*', '\*\/\*', 'image/png')
 * @param {string} mime - Actual MIME type to check
 * @returns {boolean} Whether the pattern matches
 */
export function matchesMime(pattern, mime) {
  const want = pattern.toLowerCase();
  const have = mime.toLowerCase();

  if (want === '*/*') return true;

  return want.endsWith('/*')
    ? want.slice(0, -2) === have.split('/')[0]
    : want === have;
}
4766
+
4767
+
/**
 * Error thrown when a required scope is missing.
 * Carries the missing scope string and an HTTP status (403) so handlers
 * can translate it straight into a Forbidden response.
 */
class ScopeMissingError extends Error {
  /**
   * @param {string} scope - The missing scope
   */
  constructor(scope) {
    super(`Missing required scope "${scope}"`);
    Object.assign(this, {
      name: 'ScopeMissingError',
      scope,
      status: 403,
    });
  }
}
4781
+
4782
+
/**
 * Parses and checks OAuth scope permissions.
 */
export class ScopePermissions {
  /**
   * Build repo/blob permission lists from a space-separated scope string.
   * @param {string | undefined} scopeString - Space-separated scope string
   */
  constructor(scopeString) {
    const tokens = scopeString ? scopeString.split(' ').filter(Boolean) : [];

    /** @type {Set<string>} */
    this.scopes = new Set(tokens);

    /** @type {Array<{ collection: string, actions: string[] }>} */
    this.repoPermissions = [];

    /** @type {Array<{ accept: string[] }>} */
    this.blobPermissions = [];

    for (const entry of this.scopes) {
      const repoPerm = parseRepoScope(entry);
      if (repoPerm) this.repoPermissions.push(repoPerm);

      const blobPerm = parseBlobScope(entry);
      if (blobPerm) this.blobPermissions.push(blobPerm);
    }
  }

  /**
   * Check if full access is granted (atproto or transition:generic).
   * @returns {boolean}
   */
  hasFullAccess() {
    return this.scopes.has('atproto') || this.scopes.has('transition:generic');
  }

  /**
   * Check if a repo operation is allowed.
   * @param {string} collection - The collection NSID
   * @param {string} action - The action (create, update, delete)
   * @returns {boolean}
   */
  allowsRepo(collection, action) {
    if (this.hasFullAccess()) return true;

    // A "*" collection grant matches any collection
    return this.repoPermissions.some(
      (perm) =>
        (perm.collection === '*' || perm.collection === collection) &&
        perm.actions.includes(action),
    );
  }

  /**
   * Assert that a repo operation is allowed, throwing if not.
   * @param {string} collection - The collection NSID
   * @param {string} action - The action (create, update, delete)
   * @throws {ScopeMissingError}
   */
  assertRepo(collection, action) {
    if (this.allowsRepo(collection, action)) return;
    throw new ScopeMissingError(`repo:${collection}?action=${action}`);
  }

  /**
   * Check if a blob operation is allowed.
   * @param {string} mime - The MIME type of the blob
   * @returns {boolean}
   */
  allowsBlob(mime) {
    if (this.hasFullAccess()) return true;

    return this.blobPermissions.some(({ accept }) =>
      accept.some((pattern) => matchesMime(pattern, mime)),
    );
  }

  /**
   * Assert that a blob operation is allowed, throwing if not.
   * @param {string} mime - The MIME type of the blob
   * @throws {ScopeMissingError}
   */
  assertBlob(mime) {
    if (this.allowsBlob(mime)) return;
    throw new ScopeMissingError(`blob:${mime}`);
  }
}
4877
+
4878
+
// ────────────────────────────────────────────────────────────────────────────────
// │ CONSENT PAGE DISPLAY                                                         │
// │ OAuth consent page rendering with scope visualization                        │
// ────────────────────────────────────────────────────────────────────────────────
4882
+
4883
+
/**
 * Parse scope string into display-friendly structure.
 * Repo scopes for the same collection are merged into a single row of
 * per-action booleans; blob scopes are flattened into one MIME list.
 * @param {string} scope - Space-separated scope string
 * @returns {{ hasAtproto: boolean, hasTransitionGeneric: boolean, repoPermissions: Map<string, {create: boolean, update: boolean, delete: boolean}>, blobPermissions: string[] }}
 */
export function parseScopesForDisplay(scope) {
  const scopes = scope.split(' ').filter(Boolean);

  const repoPermissions = new Map();
  const blobPermissions = [];

  for (const s of scopes) {
    const repo = parseRepoScope(s);
    if (repo) {
      const entry = repoPermissions.get(repo.collection) ?? {
        create: false,
        update: false,
        delete: false,
      };
      for (const action of repo.actions) {
        entry[action] = true;
      }
      repoPermissions.set(repo.collection, entry);
    }

    const blob = parseBlobScope(s);
    if (blob) blobPermissions.push(...blob.accept);
  }

  return {
    hasAtproto: scopes.includes('atproto'),
    hasTransitionGeneric: scopes.includes('transition:generic'),
    repoPermissions,
    blobPermissions,
  };
}
4921
+
4922
+
/**
 * Escape HTML special characters for safe interpolation into markup and
 * double-quoted attribute values.
 * BUG FIX: the previous replacements were identity mappings ('&' -> '&',
 * '<' -> '<', ...) — the entity names had been stripped (most likely an
 * HTML-entity decoding corruption) — so the function escaped nothing and
 * every interpolation site (client name, login hint, request_uri, ...) was
 * an XSS vector. Restore the real entities.
 * @param {string} s
 * @returns {string}
 */
function escapeHtml(s) {
  return s
    .replace(/&/g, '&amp;')
    .replace(/</g, '&lt;')
    .replace(/>/g, '&gt;')
    .replace(/"/g, '&quot;');
}
4934
+
4935
+
/**
 * Render repo permissions as HTML table.
 * One row per collection with check marks in the Create/Update/Delete
 * columns. Collection names pass through escapeHtml; the boolean cells emit
 * only a fixed check-mark glyph.
 * FIX: the check-mark literal had been mojibake'd ('โ'); restored to '✓'.
 * @param {Map<string, {create: boolean, update: boolean, delete: boolean}>} repoPermissions
 * @returns {string} HTML string
 */
function renderRepoTable(repoPermissions) {
  if (repoPermissions.size === 0) return '';

  let rows = '';
  for (const [collection, actions] of repoPermissions) {
    const displayCollection = collection === '*' ? '* (any)' : collection;
    rows += `<tr>
      <td>${escapeHtml(displayCollection)}</td>
      <td class="check">${actions.create ? '✓' : ''}</td>
      <td class="check">${actions.update ? '✓' : ''}</td>
      <td class="check">${actions.delete ? '✓' : ''}</td>
    </tr>`;
  }

  return `<div class="permissions-section">
    <div class="section-label">Repository permissions:</div>
    <table class="permissions-table">
      <thead><tr><th>Collection</th><th title="Create">C</th><th title="Update">U</th><th title="Delete">D</th></tr></thead>
      <tbody>${rows}</tbody>
    </table>
  </div>`;
}
4962
+
4963
+
/**
 * Render blob permissions as HTML list.
 * The wildcard "*\/*" is shown as the friendlier "All file types" label.
 * @param {string[]} blobPermissions
 * @returns {string} HTML string
 */
function renderBlobList(blobPermissions) {
  if (blobPermissions.length === 0) return '';

  let items = '';
  for (const mime of blobPermissions) {
    const label = mime === '*/*' ? 'All file types' : mime;
    items += `<li>${escapeHtml(label)}</li>`;
  }

  return `<div class="permissions-section">
    <div class="section-label">Upload permissions:</div>
    <ul class="blob-list">${items}</ul>
  </div>`;
}
4983
+
4984
+
/**
 * Render full permissions display based on parsed scopes.
 * transition:generic short-circuits to a prominent warning; otherwise the
 * repo table and blob list are concatenated (either may be empty).
 * FIX: the warning glyph had been mojibake'd ('โ ๏ธ'); restored to '⚠️'.
 * @param {{ hasAtproto: boolean, hasTransitionGeneric: boolean, repoPermissions: Map<string, {create: boolean, update: boolean, delete: boolean}>, blobPermissions: string[] }} parsed
 * @returns {string} HTML string
 */
function renderPermissionsHtml(parsed) {
  if (parsed.hasTransitionGeneric) {
    return `<div class="warning">⚠️ Full repository access requested<br>
    <small>This app can create, update, and delete any data in your repository.</small></div>`;
  }

  if (
    parsed.repoPermissions.size === 0 &&
    parsed.blobPermissions.length === 0
  ) {
    return '';
  }

  return (
    renderRepoTable(parsed.repoPermissions) +
    renderBlobList(parsed.blobPermissions)
  );
}
5007
+
5008
+
/**
 * Render the OAuth consent page HTML.
 * Server-side template: all user/client-controlled values are passed through
 * escapeHtml before interpolation; loginHint is additionally embedded into
 * the inline script via JSON.stringify so it is a safe JS string literal.
 * The profile card (shown only when loginHint is present) is hydrated
 * client-side from the public Bluesky AppView and degrades gracefully —
 * on fetch failure only the loading state is removed.
 * @param {{ clientName: string, clientId: string, scope: string, requestUri: string, loginHint?: string, error?: string }} params
 * @returns {string} HTML page content
 */
function renderConsentPage({
  clientName,
  clientId,
  scope,
  requestUri,
  loginHint = '',
  error = '',
}) {
  const parsed = parseScopesForDisplay(scope);
  // Identity-only requests (no repo/blob/full-access grants) get softer copy
  const isIdentityOnly =
    parsed.repoPermissions.size === 0 &&
    parsed.blobPermissions.length === 0 &&
    !parsed.hasTransitionGeneric;

  return `<!DOCTYPE html>
<html><head><meta charset="utf-8"><meta name="viewport" content="width=device-width,initial-scale=1">
<title>Authorize</title>
<style>
*{box-sizing:border-box}
body{font-family:system-ui,sans-serif;max-width:400px;margin:40px auto;padding:20px;background:#1a1a1a;color:#e0e0e0}
h2{color:#fff;margin-bottom:24px}
p{color:#b0b0b0;line-height:1.5}
b{color:#fff}
.error{color:#ff6b6b;background:#2d1f1f;padding:12px;margin:12px 0;border-radius:6px;border:1px solid #4a2020}
label{display:block;margin:16px 0 6px;color:#b0b0b0;font-size:14px}
input[type="password"]{width:100%;padding:12px;background:#2a2a2a;border:1px solid #404040;border-radius:6px;color:#fff;font-size:16px}
input[type="password"]:focus{outline:none;border-color:#4a9eff;box-shadow:0 0 0 2px rgba(74,158,255,0.2)}
.actions{display:flex;gap:12px;margin-top:24px}
button{flex:1;padding:12px 20px;border-radius:6px;font-size:16px;font-weight:500;cursor:pointer;transition:background 0.15s}
.deny{background:#2a2a2a;color:#e0e0e0;border:1px solid #404040}
.deny:hover{background:#333}
.approve{background:#2563eb;color:#fff;border:none}
.approve:hover{background:#1d4ed8}
.permissions-section{margin:16px 0}
.section-label{color:#b0b0b0;font-size:13px;margin-bottom:8px}
.permissions-table{width:100%;border-collapse:collapse;font-size:13px}
.permissions-table th{color:#808080;font-weight:normal;text-align:left;padding:4px 8px;border-bottom:1px solid #333}
.permissions-table th:not(:first-child){text-align:center;width:32px}
.permissions-table td{padding:4px 8px;border-bottom:1px solid #2a2a2a}
.permissions-table td:not(:first-child){text-align:center}
.permissions-table .check{color:#4ade80}
.blob-list{margin:0;padding-left:20px;color:#e0e0e0;font-size:13px}
.blob-list li{margin:4px 0}
.warning{background:#3d2f00;border:1px solid #5c4a00;border-radius:6px;padding:12px;color:#fbbf24;margin:16px 0}
.warning small{color:#d4a000;display:block;margin-top:4px}
.profile-card{display:flex;align-items:center;gap:12px;padding:16px;background:#2a2a2a;border-radius:8px;margin-bottom:20px}
.profile-card.loading .avatar{background:#404040;animation:pulse 1.5s infinite}
.profile-card .avatar{width:48px;height:48px;border-radius:50%;background:#404040;flex-shrink:0}
.profile-card .avatar img{width:100%;height:100%;border-radius:50%;object-fit:cover}
.profile-card .info{min-width:0}
.profile-card .name{color:#fff;font-weight:500;white-space:nowrap;overflow:hidden;text-overflow:ellipsis}
.profile-card .handle{color:#808080;font-size:14px}
@keyframes pulse{0%,100%{opacity:1}50%{opacity:0.5}}
</style></head>
<body>
${
  loginHint
    ? `<div class="profile-card loading" id="profile-card">
<div class="avatar" id="profile-avatar"></div>
<div class="info"><div class="name" id="profile-name">Loading...</div>
<div class="handle" id="profile-handle">${escapeHtml(loginHint.startsWith('did:') ? loginHint : `@${loginHint}`)}</div></div>
</div>`
    : ''
}
<h2>Sign in to authorize</h2>
<p><b>${escapeHtml(clientName)}</b> ${isIdentityOnly ? 'wants to uniquely identify you through your account.' : 'wants to access your account.'}</p>
${renderPermissionsHtml(parsed)}
${error ? `<p class="error">${escapeHtml(error)}</p>` : ''}
<form method="POST" action="/oauth/authorize">
<input type="hidden" name="request_uri" value="${escapeHtml(requestUri)}">
<input type="hidden" name="client_id" value="${escapeHtml(clientId)}">
<label>Password</label><input type="password" name="password" required autofocus>
<div class="actions"><button type="submit" name="action" value="deny" class="deny" formnovalidate>Deny</button>
<button type="submit" name="action" value="approve" class="approve">Authorize</button></div>
</form>
${
  loginHint
    ? `<script>
(async()=>{
const card=document.getElementById('profile-card');
if(!card)return;
try{
const r=await fetch('https://public.api.bsky.app/xrpc/app.bsky.actor.getProfile?actor='+encodeURIComponent(${JSON.stringify(loginHint)}));
if(!r.ok)throw new Error();
const p=await r.json();
document.getElementById('profile-avatar').innerHTML=p.avatar?'<img src="'+p.avatar+'" alt="">':'';
document.getElementById('profile-name').textContent=p.displayName||p.handle;
document.getElementById('profile-handle').textContent='@'+p.handle;
card.classList.remove('loading');
}catch(e){card.classList.remove('loading')}
})();
</script>`
    : ''
}
</body></html>`;
}
5109
+
5110
+
/**
 * Handle an authenticated blob upload at the Worker layer.
 * Verifies the Authorization header (legacy Bearer or OAuth DPoP), enforces
 * the blob scope for OAuth tokens against the upload's content-type, then
 * forwards the request to the uploading user's Durable Object.
 * @param {Request} request
 * @param {Env} env
 */
async function handleAuthenticatedBlobUpload(request, env) {
  // Get default PDS for OAuth token verification
  const defaultPds = getDefaultPds(env);
  const auth = await requireAuth(request, env, defaultPds);
  if ('error' in auth) return auth.error;

  // Validate scope for blob upload
  if (auth.scope !== undefined) {
    const contentType =
      request.headers.get('content-type') || 'application/octet-stream';
    const permissions = new ScopePermissions(auth.scope);
    if (!permissions.allowsBlob(contentType)) {
      return errorResponse(
        'Forbidden',
        `Missing required scope "blob:${contentType}"`,
        403,
      );
    }
  }
  // Legacy tokens without scope are trusted (backward compat)

  // Route to the user's DO based on their DID from the token
  const id = env.PDS.idFromName(auth.did);
  const pds = env.PDS.get(id);
  // Pass x-authed-did so DO knows auth was already done (avoids DPoP replay detection)
  return pds.fetch(
    new Request(request.url, {
      method: request.method,
      headers: {
        ...Object.fromEntries(request.headers),
        'x-authed-did': auth.did,
      },
      body: request.body,
    }),
  );
}
1386
5150
1387
-
const repos = []
1388
-
for (const did of dids) {
1389
-
const id = env.PDS.idFromName(did)
1390
-
const pds = env.PDS.get(id)
1391
-
const infoRes = await pds.fetch(new Request('http://internal/repo-info'))
1392
-
const info = await infoRes.json()
1393
-
if (info.head) {
1394
-
repos.push({ did, head: info.head, rev: info.rev, active: true })
5151
+
/**
 * Handle an authenticated repo write (createRecord / putRecord / deleteRecord /
 * applyWrites): verify the caller's token, enforce that the caller only writes
 * to their own repo, apply granular OAuth scope checks per endpoint, then
 * forward the write to the owning Durable Object instance.
 *
 * @param {Request} request - incoming XRPC write request with a JSON body
 * @param {Env} env - Worker bindings (PDS Durable Object namespace)
 * @returns {Promise<Response>} the DO's response, or an error response
 */
async function handleAuthenticatedRepoWrite(request, env) {
  // Get default PDS for OAuth token verification
  const defaultPds = getDefaultPds(env);
  const auth = await requireAuth(request, env, defaultPds);
  if ('error' in auth) return auth.error;

  // Body is consumed here; it is re-serialized below when forwarding.
  const body = await request.json();
  const repo = body.repo;
  if (!repo) {
    return errorResponse('InvalidRequest', 'missing repo param', 400);
  }

  // Ownership check: the authenticated DID must match the target repo.
  if (auth.did !== repo) {
    return errorResponse('Forbidden', "Cannot modify another user's repo", 403);
  }

  // Granular scope validation for OAuth tokens
  if (auth.scope !== undefined) {
    const permissions = new ScopePermissions(auth.scope);
    const url = new URL(request.url);
    const endpoint = url.pathname;

    if (endpoint === '/xrpc/com.atproto.repo.createRecord') {
      const collection = body.collection;
      if (!collection) {
        return errorResponse('InvalidRequest', 'missing collection param', 400);
      }
      if (!permissions.allowsRepo(collection, 'create')) {
        return errorResponse(
          'Forbidden',
          `Missing required scope "repo:${collection}:create"`,
          403,
        );
      }
    } else if (endpoint === '/xrpc/com.atproto.repo.putRecord') {
      const collection = body.collection;
      if (!collection) {
        return errorResponse('InvalidRequest', 'missing collection param', 400);
      }
      // putRecord requires both create and update permissions
      if (
        !permissions.allowsRepo(collection, 'create') ||
        !permissions.allowsRepo(collection, 'update')
      ) {
        // Report whichever permission is missing (create checked first).
        const missing = !permissions.allowsRepo(collection, 'create')
          ? 'create'
          : 'update';
        return errorResponse(
          'Forbidden',
          `Missing required scope "repo:${collection}:${missing}"`,
          403,
        );
      }
    } else if (endpoint === '/xrpc/com.atproto.repo.deleteRecord') {
      const collection = body.collection;
      if (!collection) {
        return errorResponse('InvalidRequest', 'missing collection param', 400);
      }
      if (!permissions.allowsRepo(collection, 'delete')) {
        return errorResponse(
          'Forbidden',
          `Missing required scope "repo:${collection}:delete"`,
          403,
        );
      }
    } else if (endpoint === '/xrpc/com.atproto.repo.applyWrites') {
      // Check every write in the batch; any denial rejects the whole request.
      const writes = body.writes || [];
      for (const write of writes) {
        const collection = write.collection;
        // NOTE(review): writes with no collection or an unrecognized $type are
        // skipped without a scope check — confirm the DO rejects them downstream.
        if (!collection) continue;

        let action;
        if (write.$type === 'com.atproto.repo.applyWrites#create') {
          action = 'create';
        } else if (write.$type === 'com.atproto.repo.applyWrites#update') {
          action = 'update';
        } else if (write.$type === 'com.atproto.repo.applyWrites#delete') {
          action = 'delete';
        } else {
          continue;
        }

        if (!permissions.allowsRepo(collection, action)) {
          return errorResponse(
            'Forbidden',
            `Missing required scope "repo:${collection}:${action}"`,
            403,
          );
        }
      }
    }
  }
  // Legacy tokens without scope are trusted (backward compat)

  // Forward the write to the Durable Object that owns this repo.
  const id = env.PDS.idFromName(repo);
  const pds = env.PDS.get(id);
  const response = await pds.fetch(
    new Request(request.url, {
      method: 'POST',
      headers: request.headers,
      body: JSON.stringify(body),
    }),
  );

  // Notify relay of updates on successful writes
  if (response.ok) {
    const url = new URL(request.url);
    // NOTE(review): fire-and-forget promise — in Workers this may be cancelled
    // once the response is returned; confirm whether ctx.waitUntil is needed.
    notifyCrawlers(env, url.hostname);
  }

  return response;
}
5267
+
5268
+
/**
 * Top-level request router for the Worker. Dispatches each incoming request
 * to the appropriate Durable Object instance (the "default" instance for
 * shared state such as handle maps and OAuth, or a per-DID instance for repo
 * data), or to a dedicated handler for authenticated writes and blob uploads.
 *
 * Branch order matters: more specific routes (e.g. repo/sync reads, blob
 * upload) are checked before the generic write-endpoint and OAuth lists.
 *
 * @param {Request} request
 * @param {Env} env
 * @returns {Promise<Response>}
 */
async function handleRequest(request, env) {
  const url = new URL(request.url);
  const subdomain = getSubdomain(url.hostname);

  // Handle resolution via subdomain or bare domain
  if (url.pathname === '/.well-known/atproto-did') {
    // Look up handle -> DID in default DO
    // Use subdomain if present, otherwise try bare hostname as handle
    const handleToResolve = subdomain || url.hostname;
    const defaultPds = getDefaultPds(env);
    const resolveRes = await defaultPds.fetch(
      new Request(
        `http://internal/resolve-handle?handle=${encodeURIComponent(handleToResolve)}`,
      ),
    );
    if (!resolveRes.ok) {
      return new Response('Handle not found', { status: 404 });
    }
    const { did } = await resolveRes.json();
    // Spec requires the bare DID as text/plain.
    return new Response(did, { headers: { 'Content-Type': 'text/plain' } });
  }

  // describeServer - works on bare domain
  if (url.pathname === '/xrpc/com.atproto.server.describeServer') {
    const defaultPds = getDefaultPds(env);
    // Pass the original hostname so the DO can report the right service URL.
    const newReq = new Request(request.url, {
      method: request.method,
      headers: {
        ...Object.fromEntries(request.headers),
        'x-hostname': url.hostname,
      },
    });
    return defaultPds.fetch(newReq);
  }

  // Session endpoints - route to default DO (has handleMap for identifier resolution)
  const sessionEndpoints = [
    '/xrpc/com.atproto.server.createSession',
    '/xrpc/com.atproto.server.getSession',
    '/xrpc/com.atproto.server.refreshSession',
  ];
  if (sessionEndpoints.includes(url.pathname)) {
    const defaultPds = getDefaultPds(env);
    return defaultPds.fetch(request);
  }

  // Proxy app.bsky.* endpoints to Bluesky AppView
  if (url.pathname.startsWith('/xrpc/app.bsky.')) {
    // Get default PDS for OAuth token verification
    const defaultPds = getDefaultPds(env);
    // Authenticate the user first
    const auth = await requireAuth(request, env, defaultPds);
    if ('error' in auth) return auth.error;

    // Route to the user's DO instance to create service auth and proxy
    const id = env.PDS.idFromName(auth.did);
    const pds = env.PDS.get(id);
    return pds.fetch(
      new Request(request.url, {
        method: request.method,
        headers: {
          ...Object.fromEntries(request.headers),
          'x-authed-did': auth.did, // Pass the authenticated DID
        },
        // GET/HEAD requests must not carry a body.
        body:
          request.method !== 'GET' && request.method !== 'HEAD'
            ? request.body
            : undefined,
      }),
    );
  }

  // Handle registration routes - go to default DO
  if (
    url.pathname === '/register-handle' ||
    url.pathname === '/resolve-handle'
  ) {
    const defaultPds = getDefaultPds(env);
    return defaultPds.fetch(request);
  }

  // resolveHandle XRPC endpoint
  if (url.pathname === '/xrpc/com.atproto.identity.resolveHandle') {
    const handle = url.searchParams.get('handle');
    if (!handle) {
      return errorResponse('InvalidRequest', 'missing handle param', 400);
    }
    const defaultPds = getDefaultPds(env);
    const resolveRes = await defaultPds.fetch(
      new Request(
        `http://internal/resolve-handle?handle=${encodeURIComponent(handle)}`,
      ),
    );
    if (!resolveRes.ok) {
      return errorResponse('InvalidRequest', 'Unable to resolve handle', 400);
    }
    const { did } = await resolveRes.json();
    return Response.json({ did });
  }

  // subscribeRepos WebSocket - route to default instance for firehose
  if (url.pathname === '/xrpc/com.atproto.sync.subscribeRepos') {
    const defaultPds = getDefaultPds(env);
    return defaultPds.fetch(request);
  }

  // listRepos needs to aggregate from all registered DIDs
  if (url.pathname === '/xrpc/com.atproto.sync.listRepos') {
    const defaultPds = getDefaultPds(env);
    const regRes = await defaultPds.fetch(
      new Request('http://internal/get-registered-dids'),
    );
    const { dids } = await regRes.json();

    // Query each registered DID's DO sequentially for its current head.
    const repos = [];
    for (const did of dids) {
      const id = env.PDS.idFromName(did);
      const pds = env.PDS.get(id);
      const infoRes = await pds.fetch(new Request('http://internal/repo-info'));
      const info = await infoRes.json();
      // Only include repos that have committed at least once (head present).
      if (info.head) {
        repos.push({ did, head: info.head, rev: info.rev, active: true });
      }
    }
    return Response.json({ repos, cursor: undefined });
  }

  // Repo endpoints use ?repo= param instead of ?did=
  if (
    url.pathname === '/xrpc/com.atproto.repo.describeRepo' ||
    url.pathname === '/xrpc/com.atproto.repo.listRecords' ||
    url.pathname === '/xrpc/com.atproto.repo.getRecord'
  ) {
    const repo = url.searchParams.get('repo');
    if (!repo) {
      return errorResponse('InvalidRequest', 'missing repo param', 400);
    }

    // Check for atproto-proxy header - if present, proxy to specified service
    const proxyHeader = request.headers.get('atproto-proxy');
    if (proxyHeader) {
      const parsed = parseAtprotoProxyHeader(proxyHeader);
      if (!parsed) {
        // Header present but malformed
        return errorResponse(
          'InvalidRequest',
          `Malformed atproto-proxy header: ${proxyHeader}`,
          400,
        );
      }
      const serviceUrl = getKnownServiceUrl(parsed.did, parsed.serviceId);
      if (serviceUrl) {
        return proxyToService(request, serviceUrl);
      }
      // Unknown service - could add DID resolution here in the future
      return errorResponse(
        'InvalidRequest',
        `Unknown proxy service: ${proxyHeader}`,
        400,
      );
    }

    // No proxy header - handle locally (returns appropriate error if DID not found)
    const id = env.PDS.idFromName(repo);
    const pds = env.PDS.get(id);
    return pds.fetch(request);
  }

  // Sync endpoints use ?did= param
  if (
    url.pathname === '/xrpc/com.atproto.sync.getLatestCommit' ||
    url.pathname === '/xrpc/com.atproto.sync.getRepoStatus' ||
    url.pathname === '/xrpc/com.atproto.sync.getRepo' ||
    url.pathname === '/xrpc/com.atproto.sync.getRecord' ||
    url.pathname === '/xrpc/com.atproto.sync.getBlob' ||
    url.pathname === '/xrpc/com.atproto.sync.listBlobs'
  ) {
    const did = url.searchParams.get('did');
    if (!did) {
      return errorResponse('InvalidRequest', 'missing did param', 400);
    }
    const id = env.PDS.idFromName(did);
    const pds = env.PDS.get(id);
    return pds.fetch(request);
  }

  // Blob upload endpoint (binary body, uses DID from token)
  if (url.pathname === '/xrpc/com.atproto.repo.uploadBlob') {
    return handleAuthenticatedBlobUpload(request, env);
  }

  // Authenticated repo write endpoints
  const repoWriteEndpoints = [
    '/xrpc/com.atproto.repo.createRecord',
    '/xrpc/com.atproto.repo.deleteRecord',
    '/xrpc/com.atproto.repo.putRecord',
    '/xrpc/com.atproto.repo.applyWrites',
  ];
  if (repoWriteEndpoints.includes(url.pathname)) {
    return handleAuthenticatedRepoWrite(request, env);
  }

  // Health check endpoint
  if (url.pathname === '/xrpc/_health') {
    return Response.json({ version: VERSION });
  }

  // Root path - ASCII art
  if (url.pathname === '/') {
    const ascii = `
โโโโโโโ โโโโโโโ โโโโโโโโ โโโ โโโโโโโโ
โโโโโโโโ โโโโโโโโ โโโโโโโโ โโโ โโโโโโโโ
โโโโโโโโ โโโ โโโ โโโโโโโโ โโโ โโโโโโโโ
โโโโโโโ โโโ โโโ โโโโโโโโ โโ โโโ โโโโโโโโ
โโโ โโโโโโโโ โโโโโโโโ โโโ โโโโโโโโ โโโโโโโโ
โโโ โโโโโโโ โโโโโโโโ โโโ โโโโโโ โโโโโโโโ

ATProto PDS on Cloudflare Workers
`;
    return new Response(ascii, {
      headers: { 'Content-Type': 'text/plain; charset=utf-8' },
    });
  }

  // On init, register this DID with the default instance (requires ?did= param, no auth yet)
  if (url.pathname === '/init' && request.method === 'POST') {
    const did = url.searchParams.get('did');
    if (!did) {
      return errorResponse('InvalidRequest', 'missing did param', 400);
    }
    const body = await request.json();

    // Register with default instance for discovery
    const defaultPds = getDefaultPds(env);
    await defaultPds.fetch(
      new Request('http://internal/register-did', {
        method: 'POST',
        body: JSON.stringify({ did }),
      }),
    );

    // Register handle if provided
    if (body.handle) {
      await defaultPds.fetch(
        new Request('http://internal/register-handle', {
          method: 'POST',
          body: JSON.stringify({ did, handle: body.handle }),
        }),
      );
    }

    // Also initialize default instance with identity for OAuth (single-user PDS)
    await defaultPds.fetch(
      new Request('http://internal/init', {
        method: 'POST',
        body: JSON.stringify(body),
      }),
    );

    // Forward to the actual PDS instance
    const id = env.PDS.idFromName(did);
    const pds = env.PDS.get(id);
    return pds.fetch(
      new Request(request.url, {
        method: 'POST',
        headers: request.headers,
        body: JSON.stringify(body),
      }),
    );
  }

  // OAuth endpoints - route to default PDS instance
  const oauthEndpoints = [
    '/.well-known/oauth-authorization-server',
    '/.well-known/oauth-protected-resource',
    '/oauth/jwks',
    '/oauth/par',
    '/oauth/authorize',
    '/oauth/token',
    '/oauth/revoke',
  ];
  if (oauthEndpoints.includes(url.pathname)) {
    const defaultPds = getDefaultPds(env);
    return defaultPds.fetch(request);
  }

  // Unknown endpoint
  return errorResponse('NotFound', 'Endpoint not found', 404);
}
+1837
test/e2e.test.js
+1837
test/e2e.test.js
···
1
+
/**
2
+
* E2E tests for PDS - runs against local wrangler dev
3
+
* Uses Node's built-in test runner and fetch
4
+
*/
5
+
6
+
import assert from 'node:assert';
7
+
import { spawn } from 'node:child_process';
8
+
import { randomBytes } from 'node:crypto';
9
+
import { after, before, describe, it } from 'node:test';
10
+
import { DpopClient } from './helpers/dpop.js';
11
+
import { getOAuthTokenWithScope } from './helpers/oauth.js';
12
+
13
+
// Base URL of the local wrangler dev server the tests run against.
const BASE = 'http://localhost:8787';
// Random per-run DID so repeated test runs don't collide with persisted state.
const DID = `did:plc:test${randomBytes(8).toString('hex')}`;
const PASSWORD = 'test-password';

// Mutable state shared across the test suites below.
/** @type {import('node:child_process').ChildProcess|null} */
let wrangler = null;
// Access JWT obtained in the Authentication suite, reused by later suites.
/** @type {string} */
let token = '';
/** @type {string} */
let refreshToken = '';
// rkey of the record created in 'Record operations', reused by read tests.
/** @type {string} */
let testRkey = '';
25
+
26
+
/**
 * Poll the dev server's root URL until it responds OK.
 * Throws if the server has not come up after `maxAttempts` polls
 * (spaced 500ms apart).
 */
async function waitForServer(maxAttempts = 30) {
  let attempt = 0;
  while (attempt < maxAttempts) {
    try {
      const probe = await fetch(`${BASE}/`);
      if (probe.ok) return;
    } catch {
      // Server not ready yet
    }
    await new Promise((resolve) => setTimeout(resolve, 500));
    attempt += 1;
  }
  throw new Error('Server failed to start');
}
41
+
42
+
/**
 * POST a JSON body to `path` and return `{ status, data }`.
 * `data` is the parsed JSON response when the request succeeded, else null.
 * Retries 5xx responses up to two extra times with a short backoff
 * (works around wrangler dev flakiness).
 */
async function jsonPost(path, body, headers = {}) {
  const maxAttempts = 3;
  for (let attempt = 0; attempt < maxAttempts; attempt++) {
    const response = await fetch(`${BASE}${path}`, {
      method: 'POST',
      headers: { 'Content-Type': 'application/json', ...headers },
      body: JSON.stringify(body),
    });
    const serverError = response.status >= 500;
    const retriesLeft = attempt < maxAttempts - 1;
    if (serverError && retriesLeft) {
      // Linear backoff: 100ms, then 200ms.
      await new Promise((resolve) => setTimeout(resolve, 100 * (attempt + 1)));
      continue;
    }
    return {
      status: response.status,
      data: response.ok ? await response.json() : null,
    };
  }
}
60
+
61
+
/**
 * POST form-encoded params to `path` and return `{ status, data }`.
 * `data` is parsed JSON when the response body is JSON, otherwise raw text.
 * Retries 5xx responses up to two extra times with a short backoff
 * (works around wrangler dev flakiness).
 */
async function formPost(path, params, headers = {}) {
  const maxAttempts = 3;
  for (let attempt = 0; attempt < maxAttempts; attempt++) {
    const response = await fetch(`${BASE}${path}`, {
      method: 'POST',
      headers: {
        'Content-Type': 'application/x-www-form-urlencoded',
        ...headers,
      },
      body: new URLSearchParams(params).toString(),
    });
    const serverError = response.status >= 500;
    const retriesLeft = attempt < maxAttempts - 1;
    if (serverError && retriesLeft) {
      // Linear backoff: 100ms, then 200ms.
      await new Promise((resolve) => setTimeout(resolve, 100 * (attempt + 1)));
      continue;
    }
    const text = await response.text();
    let data = null;
    try {
      data = JSON.parse(text);
    } catch {
      // Non-JSON body (e.g. plain-text error page) — return it verbatim.
      data = text;
    }
    return { status: response.status, data };
  }
}
89
+
90
+
describe('E2E Tests', () => {
91
+
before(async () => {
92
+
// Start wrangler
93
+
wrangler = spawn(
94
+
'npx',
95
+
['wrangler', 'dev', '--port', '8787', '--persist-to', '.wrangler/state'],
96
+
{
97
+
stdio: 'pipe',
98
+
cwd: process.cwd(),
99
+
},
100
+
);
101
+
102
+
await waitForServer();
103
+
104
+
// Initialize PDS
105
+
const privKey = randomBytes(32).toString('hex');
106
+
const res = await fetch(`${BASE}/init?did=${DID}`, {
107
+
method: 'POST',
108
+
headers: { 'Content-Type': 'application/json' },
109
+
body: JSON.stringify({
110
+
did: DID,
111
+
privateKey: privKey,
112
+
handle: 'test.local',
113
+
}),
114
+
});
115
+
assert.ok(res.ok, 'PDS initialization failed');
116
+
});
117
+
118
+
after(() => {
119
+
if (wrangler) {
120
+
wrangler.kill();
121
+
}
122
+
});
123
+
124
+
describe('Server endpoints', () => {
125
+
it('root returns ASCII art', async () => {
126
+
const res = await fetch(`${BASE}/`);
127
+
const text = await res.text();
128
+
assert.ok(text.includes('PDS'), 'Root should contain PDS');
129
+
});
130
+
131
+
it('describeServer returns DID', async () => {
132
+
const res = await fetch(`${BASE}/xrpc/com.atproto.server.describeServer`);
133
+
const data = await res.json();
134
+
assert.ok(data.did, 'describeServer should return did');
135
+
});
136
+
137
+
it('resolveHandle returns DID', async () => {
138
+
const res = await fetch(
139
+
`${BASE}/xrpc/com.atproto.identity.resolveHandle?handle=test.local`,
140
+
);
141
+
const data = await res.json();
142
+
assert.ok(data.did, 'resolveHandle should return did');
143
+
});
144
+
});
145
+
146
+
describe('Authentication', () => {
147
+
it('createSession returns tokens', async () => {
148
+
const { status, data } = await jsonPost(
149
+
'/xrpc/com.atproto.server.createSession',
150
+
{
151
+
identifier: DID,
152
+
password: PASSWORD,
153
+
},
154
+
);
155
+
assert.strictEqual(status, 200);
156
+
assert.ok(data.accessJwt, 'Should return accessJwt');
157
+
assert.ok(data.refreshJwt, 'Should return refreshJwt');
158
+
token = data.accessJwt;
159
+
refreshToken = data.refreshJwt;
160
+
});
161
+
162
+
it('getSession with valid token', async () => {
163
+
const res = await fetch(`${BASE}/xrpc/com.atproto.server.getSession`, {
164
+
headers: { Authorization: `Bearer ${token}` },
165
+
});
166
+
const data = await res.json();
167
+
assert.ok(data.did, 'getSession should return did');
168
+
});
169
+
170
+
it('refreshSession returns new tokens', async () => {
171
+
const res = await fetch(
172
+
`${BASE}/xrpc/com.atproto.server.refreshSession`,
173
+
{
174
+
method: 'POST',
175
+
headers: { Authorization: `Bearer ${refreshToken}` },
176
+
},
177
+
);
178
+
const data = await res.json();
179
+
assert.ok(data.accessJwt, 'Should return new accessJwt');
180
+
assert.ok(data.refreshJwt, 'Should return new refreshJwt');
181
+
token = data.accessJwt; // Use new token
182
+
});
183
+
184
+
it('refreshSession rejects access token', async () => {
185
+
const res = await fetch(
186
+
`${BASE}/xrpc/com.atproto.server.refreshSession`,
187
+
{
188
+
method: 'POST',
189
+
headers: { Authorization: `Bearer ${token}` },
190
+
},
191
+
);
192
+
assert.strictEqual(res.status, 400);
193
+
});
194
+
195
+
it('refreshSession rejects missing auth', async () => {
196
+
const res = await fetch(
197
+
`${BASE}/xrpc/com.atproto.server.refreshSession`,
198
+
{
199
+
method: 'POST',
200
+
},
201
+
);
202
+
assert.strictEqual(res.status, 401);
203
+
});
204
+
205
+
it('createRecord rejects without auth', async () => {
206
+
const { status } = await jsonPost('/xrpc/com.atproto.repo.createRecord', {
207
+
repo: 'x',
208
+
collection: 'x',
209
+
record: {},
210
+
});
211
+
assert.strictEqual(status, 401);
212
+
});
213
+
214
+
it('getPreferences works', async () => {
215
+
const res = await fetch(`${BASE}/xrpc/app.bsky.actor.getPreferences`, {
216
+
headers: { Authorization: `Bearer ${token}` },
217
+
});
218
+
const data = await res.json();
219
+
assert.ok(data.preferences, 'Should return preferences');
220
+
});
221
+
222
+
it('putPreferences works', async () => {
223
+
const { status } = await jsonPost(
224
+
'/xrpc/app.bsky.actor.putPreferences',
225
+
{ preferences: [{ $type: 'app.bsky.actor.defs#savedFeedsPrefV2' }] },
226
+
{ Authorization: `Bearer ${token}` },
227
+
);
228
+
assert.strictEqual(status, 200);
229
+
});
230
+
});
231
+
232
+
describe('Record operations', () => {
233
+
it('createRecord with auth', async () => {
234
+
const { status, data } = await jsonPost(
235
+
'/xrpc/com.atproto.repo.createRecord',
236
+
{
237
+
repo: DID,
238
+
collection: 'app.bsky.feed.post',
239
+
record: { text: 'test', createdAt: new Date().toISOString() },
240
+
},
241
+
{ Authorization: `Bearer ${token}` },
242
+
);
243
+
assert.strictEqual(status, 200);
244
+
assert.ok(data.uri, 'Should return uri');
245
+
testRkey = data.uri.split('/').pop();
246
+
});
247
+
248
+
it('getRecord returns record', async () => {
249
+
const res = await fetch(
250
+
`${BASE}/xrpc/com.atproto.repo.getRecord?repo=${DID}&collection=app.bsky.feed.post&rkey=${testRkey}`,
251
+
);
252
+
const data = await res.json();
253
+
assert.ok(data.value?.text, 'Should return record value');
254
+
});
255
+
256
+
it('putRecord updates record', async () => {
257
+
const { status, data } = await jsonPost(
258
+
'/xrpc/com.atproto.repo.putRecord',
259
+
{
260
+
repo: DID,
261
+
collection: 'app.bsky.feed.post',
262
+
rkey: testRkey,
263
+
record: { text: 'updated', createdAt: new Date().toISOString() },
264
+
},
265
+
{ Authorization: `Bearer ${token}` },
266
+
);
267
+
assert.strictEqual(status, 200);
268
+
assert.ok(data.uri);
269
+
});
270
+
271
+
it('listRecords returns records', async () => {
272
+
const res = await fetch(
273
+
`${BASE}/xrpc/com.atproto.repo.listRecords?repo=${DID}&collection=app.bsky.feed.post`,
274
+
);
275
+
const data = await res.json();
276
+
assert.ok(data.records?.length > 0, 'Should return records');
277
+
});
278
+
279
+
it('describeRepo returns did', async () => {
280
+
const res = await fetch(
281
+
`${BASE}/xrpc/com.atproto.repo.describeRepo?repo=${DID}`,
282
+
);
283
+
const data = await res.json();
284
+
assert.ok(data.did);
285
+
});
286
+
287
+
it('applyWrites create', async () => {
288
+
const { status, data } = await jsonPost(
289
+
'/xrpc/com.atproto.repo.applyWrites',
290
+
{
291
+
repo: DID,
292
+
writes: [
293
+
{
294
+
$type: 'com.atproto.repo.applyWrites#create',
295
+
collection: 'app.bsky.feed.post',
296
+
rkey: 'applytest',
297
+
value: { text: 'batch', createdAt: new Date().toISOString() },
298
+
},
299
+
],
300
+
},
301
+
{ Authorization: `Bearer ${token}` },
302
+
);
303
+
assert.strictEqual(status, 200);
304
+
assert.ok(data.results);
305
+
});
306
+
307
+
it('applyWrites delete', async () => {
308
+
const { status, data } = await jsonPost(
309
+
'/xrpc/com.atproto.repo.applyWrites',
310
+
{
311
+
repo: DID,
312
+
writes: [
313
+
{
314
+
$type: 'com.atproto.repo.applyWrites#delete',
315
+
collection: 'app.bsky.feed.post',
316
+
rkey: 'applytest',
317
+
},
318
+
],
319
+
},
320
+
{ Authorization: `Bearer ${token}` },
321
+
);
322
+
assert.strictEqual(status, 200);
323
+
assert.ok(data.results);
324
+
});
325
+
});
326
+
327
+
describe('Sync endpoints', () => {
328
+
it('getLatestCommit returns cid', async () => {
329
+
const res = await fetch(
330
+
`${BASE}/xrpc/com.atproto.sync.getLatestCommit?did=${DID}`,
331
+
);
332
+
const data = await res.json();
333
+
assert.ok(data.cid);
334
+
});
335
+
336
+
it('getRepoStatus returns did', async () => {
337
+
const res = await fetch(
338
+
`${BASE}/xrpc/com.atproto.sync.getRepoStatus?did=${DID}`,
339
+
);
340
+
const data = await res.json();
341
+
assert.ok(data.did);
342
+
});
343
+
344
+
it('getRepo returns CAR', async () => {
345
+
const res = await fetch(
346
+
`${BASE}/xrpc/com.atproto.sync.getRepo?did=${DID}`,
347
+
);
348
+
const data = await res.arrayBuffer();
349
+
assert.ok(data.byteLength > 100, 'Should return CAR data');
350
+
});
351
+
352
+
it('getRecord returns record CAR', async () => {
353
+
const res = await fetch(
354
+
`${BASE}/xrpc/com.atproto.sync.getRecord?did=${DID}&collection=app.bsky.feed.post&rkey=${testRkey}`,
355
+
);
356
+
const data = await res.arrayBuffer();
357
+
assert.ok(data.byteLength > 50);
358
+
});
359
+
360
+
it('listRepos returns repos', async () => {
361
+
const res = await fetch(`${BASE}/xrpc/com.atproto.sync.listRepos`);
362
+
const data = await res.json();
363
+
assert.ok(data.repos?.length > 0);
364
+
});
365
+
});
366
+
367
+
describe('Error handling', () => {
368
+
it('invalid password rejected (401)', async () => {
369
+
const { status } = await jsonPost(
370
+
'/xrpc/com.atproto.server.createSession',
371
+
{
372
+
identifier: DID,
373
+
password: 'wrong-password',
374
+
},
375
+
);
376
+
assert.strictEqual(status, 401);
377
+
});
378
+
379
+
it('wrong repo rejected (403)', async () => {
380
+
const { status } = await jsonPost(
381
+
'/xrpc/com.atproto.repo.createRecord',
382
+
{
383
+
repo: 'did:plc:z72i7hdynmk6r22z27h6tvur',
384
+
collection: 'app.bsky.feed.post',
385
+
record: { text: 'x', createdAt: '2024-01-01T00:00:00Z' },
386
+
},
387
+
{ Authorization: `Bearer ${token}` },
388
+
);
389
+
assert.strictEqual(status, 403);
390
+
});
391
+
392
+
it('non-existent record errors', async () => {
393
+
const res = await fetch(
394
+
`${BASE}/xrpc/com.atproto.repo.getRecord?repo=${DID}&collection=app.bsky.feed.post&rkey=nonexistent`,
395
+
);
396
+
assert.ok([400, 404].includes(res.status));
397
+
});
398
+
});
399
+
400
+
describe('Blob endpoints', () => {
401
+
/** @type {string} */
402
+
let blobCid = '';
403
+
/** @type {string} */
404
+
let blobPostRkey = '';
405
+
406
+
// Create minimal PNG
407
+
const pngBytes = new Uint8Array([
408
+
0x89, 0x50, 0x4e, 0x47, 0x0d, 0x0a, 0x1a, 0x0a, 0x00, 0x00, 0x00, 0x0d,
409
+
0x49, 0x48, 0x44, 0x52, 0x00, 0x00, 0x00, 0x01, 0x00, 0x00, 0x00, 0x01,
410
+
0x08, 0x06, 0x00, 0x00, 0x00, 0x1f, 0x15, 0xc4, 0x89, 0x00, 0x00, 0x00,
411
+
0x0a, 0x49, 0x44, 0x41, 0x54, 0x78, 0x9c, 0x63, 0x00, 0x01, 0x00, 0x00,
412
+
0x05, 0x00, 0x01, 0x0d, 0x0a, 0x2d, 0xb4, 0x00, 0x00, 0x00, 0x00, 0x49,
413
+
0x45, 0x4e, 0x44, 0xae, 0x42, 0x60, 0x82,
414
+
]);
415
+
416
+
it('uploadBlob rejects without auth', async () => {
417
+
const res = await fetch(`${BASE}/xrpc/com.atproto.repo.uploadBlob`, {
418
+
method: 'POST',
419
+
headers: { 'Content-Type': 'image/png' },
420
+
body: pngBytes,
421
+
});
422
+
assert.strictEqual(res.status, 401);
423
+
});
424
+
425
+
it('uploadBlob returns CID', async () => {
426
+
const res = await fetch(`${BASE}/xrpc/com.atproto.repo.uploadBlob`, {
427
+
method: 'POST',
428
+
headers: {
429
+
'Content-Type': 'image/png',
430
+
Authorization: `Bearer ${token}`,
431
+
},
432
+
body: pngBytes,
433
+
});
434
+
const data = await res.json();
435
+
assert.ok(data.blob?.ref?.$link);
436
+
assert.strictEqual(data.blob?.mimeType, 'image/png');
437
+
blobCid = data.blob.ref.$link;
438
+
});
439
+
440
+
it('listBlobs includes uploaded blob', async () => {
441
+
const res = await fetch(
442
+
`${BASE}/xrpc/com.atproto.sync.listBlobs?did=${DID}`,
443
+
);
444
+
const data = await res.json();
445
+
assert.ok(data.cids?.includes(blobCid));
446
+
});
447
+
448
+
it('getBlob retrieves data', async () => {
449
+
const res = await fetch(
450
+
`${BASE}/xrpc/com.atproto.sync.getBlob?did=${DID}&cid=${blobCid}`,
451
+
);
452
+
assert.ok(res.ok);
453
+
assert.strictEqual(res.headers.get('content-type'), 'image/png');
454
+
assert.strictEqual(res.headers.get('x-content-type-options'), 'nosniff');
455
+
});
456
+
457
+
it('getBlob rejects wrong DID', async () => {
458
+
const res = await fetch(
459
+
`${BASE}/xrpc/com.atproto.sync.getBlob?did=did:plc:wrongdid&cid=${blobCid}`,
460
+
);
461
+
assert.strictEqual(res.status, 400);
462
+
});
463
+
464
+
it('getBlob rejects invalid CID', async () => {
465
+
const res = await fetch(
466
+
`${BASE}/xrpc/com.atproto.sync.getBlob?did=${DID}&cid=invalid`,
467
+
);
468
+
assert.strictEqual(res.status, 400);
469
+
});
470
+
471
+
it('getBlob 404 for missing blob', async () => {
472
+
const res = await fetch(
473
+
`${BASE}/xrpc/com.atproto.sync.getBlob?did=${DID}&cid=bafkreiaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa`,
474
+
);
475
+
assert.strictEqual(res.status, 404);
476
+
});
477
+
478
+
it('createRecord with blob ref', async () => {
479
+
const { status, data } = await jsonPost(
480
+
'/xrpc/com.atproto.repo.createRecord',
481
+
{
482
+
repo: DID,
483
+
collection: 'app.bsky.feed.post',
484
+
record: {
485
+
text: 'post with image',
486
+
createdAt: new Date().toISOString(),
487
+
embed: {
488
+
$type: 'app.bsky.embed.images',
489
+
images: [
490
+
{
491
+
image: {
492
+
$type: 'blob',
493
+
ref: { $link: blobCid },
494
+
mimeType: 'image/png',
495
+
size: pngBytes.length,
496
+
},
497
+
alt: 'test',
498
+
},
499
+
],
500
+
},
501
+
},
502
+
},
503
+
{ Authorization: `Bearer ${token}` },
504
+
);
505
+
assert.strictEqual(status, 200);
506
+
blobPostRkey = data.uri.split('/').pop();
507
+
});
508
+
509
+
it('blob persists after record creation', async () => {
510
+
const res = await fetch(
511
+
`${BASE}/xrpc/com.atproto.sync.listBlobs?did=${DID}`,
512
+
);
513
+
const data = await res.json();
514
+
assert.ok(data.cids?.includes(blobCid));
515
+
});
516
+
517
+
it('deleteRecord with blob cleans up', async () => {
518
+
const { status } = await jsonPost(
519
+
'/xrpc/com.atproto.repo.deleteRecord',
520
+
{ repo: DID, collection: 'app.bsky.feed.post', rkey: blobPostRkey },
521
+
{ Authorization: `Bearer ${token}` },
522
+
);
523
+
assert.strictEqual(status, 200);
524
+
525
+
const res = await fetch(
526
+
`${BASE}/xrpc/com.atproto.sync.listBlobs?did=${DID}`,
527
+
);
528
+
const data = await res.json();
529
+
assert.strictEqual(
530
+
data.cids?.length,
531
+
0,
532
+
'Orphaned blob should be cleaned up',
533
+
);
534
+
});
535
+
});
536
+
537
+
describe('OAuth endpoints', () => {
  it('AS metadata', async () => {
    const res = await fetch(`${BASE}/.well-known/oauth-authorization-server`);
    const meta = await res.json();
    // Scalar fields: every advertised endpoint must be rooted at the issuer.
    const expectedScalars = {
      issuer: BASE,
      authorization_endpoint: `${BASE}/oauth/authorize`,
      token_endpoint: `${BASE}/oauth/token`,
      pushed_authorization_request_endpoint: `${BASE}/oauth/par`,
      revocation_endpoint: `${BASE}/oauth/revoke`,
      jwks_uri: `${BASE}/oauth/jwks`,
      require_pushed_authorization_requests: false,
      client_id_metadata_document_supported: true,
    };
    for (const [field, value] of Object.entries(expectedScalars)) {
      assert.strictEqual(meta[field], value);
    }
    // Array-valued fields.
    assert.deepStrictEqual(meta.scopes_supported, ['atproto']);
    assert.deepStrictEqual(meta.dpop_signing_alg_values_supported, ['ES256']);
    assert.deepStrictEqual(meta.protected_resources, [BASE]);
  });

  it('PR metadata', async () => {
    const res = await fetch(`${BASE}/.well-known/oauth-protected-resource`);
    const meta = await res.json();
    assert.strictEqual(meta.resource, BASE);
    assert.deepStrictEqual(meta.authorization_servers, [BASE]);
  });

  it('JWKS endpoint', async () => {
    const res = await fetch(`${BASE}/oauth/jwks`);
    const { keys } = await res.json();
    assert.ok(keys?.length > 0);
    const [key] = keys;
    // Published key must be a public ES256 signing key in JWK form.
    assert.strictEqual(key.kty, 'EC');
    assert.strictEqual(key.crv, 'P-256');
    assert.strictEqual(key.alg, 'ES256');
    assert.strictEqual(key.use, 'sig');
    assert.ok(key.x && key.y, 'Should have x,y coords');
    assert.ok(!key.d, 'Should not expose private key');
  });

  it('PAR rejects missing DPoP', async () => {
    const parParams = {
      client_id: 'http://localhost:3000',
      redirect_uri: 'http://localhost:3000/callback',
      response_type: 'code',
      scope: 'atproto',
      code_challenge: 'test',
      code_challenge_method: 'S256',
    };
    const { status, data } = await formPost('/oauth/par', parParams);
    assert.strictEqual(status, 400);
    assert.strictEqual(data.error, 'invalid_dpop_proof');
  });

  it('token rejects missing DPoP', async () => {
    const tokenParams = {
      grant_type: 'authorization_code',
      code: 'fake',
      client_id: 'http://localhost:3000',
    };
    const { status, data } = await formPost('/oauth/token', tokenParams);
    assert.strictEqual(status, 400);
    assert.strictEqual(data.error, 'invalid_dpop_proof');
  });

  it('revoke returns 200 for invalid token', async () => {
    // RFC 7009: revocation of an unknown token is still a 200.
    const { status } = await formPost('/oauth/revoke', {
      token: 'nonexistent',
      client_id: 'http://localhost:3000',
    });
    assert.strictEqual(status, 200);
  });
});
611
+
612
+
describe('OAuth flow with DPoP', () => {
  // Test client metadata is served from localhost:3000 in the e2e environment.
  const clientId = 'http://localhost:3000';
  const redirectUri = 'http://localhost:3000/callback';

  /**
   * Generate a fresh PKCE verifier and its S256 code challenge.
   * @returns {Promise<{codeVerifier: string, codeChallenge: string}>}
   */
  async function createPkcePair() {
    const codeVerifier = randomBytes(32).toString('base64url');
    const digest = await crypto.subtle.digest(
      'SHA-256',
      new TextEncoder().encode(codeVerifier),
    );
    return {
      codeVerifier,
      codeChallenge: Buffer.from(digest).toString('base64url'),
    };
  }

  /**
   * Send a Pushed Authorization Request.
   * @param {object} dpop - DpopClient whose key signs the proof.
   * @param {string} codeChallenge - PKCE S256 challenge.
   * @param {object} [params] - Overrides/extras merged over the defaults
   *   (e.g. state, response_mode, a bad redirect_uri).
   * @param {string|null} [proof] - Pre-built DPoP proof to reuse; pass the
   *   same proof twice to exercise jti-replay detection.
   * @returns {Promise<Response>}
   */
  async function pushAuthRequest(dpop, codeChallenge, params = {}, proof = null) {
    const dpopProof = proof ?? (await dpop.createProof('POST', `${BASE}/oauth/par`));
    return fetch(`${BASE}/oauth/par`, {
      method: 'POST',
      headers: {
        'Content-Type': 'application/x-www-form-urlencoded',
        DPoP: dpopProof,
      },
      body: new URLSearchParams({
        client_id: clientId,
        redirect_uri: redirectUri,
        response_type: 'code',
        scope: 'atproto',
        code_challenge: codeChallenge,
        code_challenge_method: 'S256',
        login_hint: DID,
        ...params,
      }).toString(),
    });
  }

  /**
   * Simulate user consent: POST the account password to /oauth/authorize.
   * Uses redirect: 'manual' so the 302 Location can be inspected.
   * @returns {Promise<Response>}
   */
  async function approveConsent(requestUri) {
    return fetch(`${BASE}/oauth/authorize`, {
      method: 'POST',
      headers: { 'Content-Type': 'application/x-www-form-urlencoded' },
      body: new URLSearchParams({
        request_uri: requestUri,
        client_id: clientId,
        password: PASSWORD,
      }).toString(),
      redirect: 'manual',
    });
  }

  /**
   * Exchange an authorization code at the token endpoint, signing the
   * DPoP proof with the supplied client key.
   * @returns {Promise<Response>}
   */
  async function exchangeCode(dpop, code, codeVerifier) {
    const proof = await dpop.createProof('POST', `${BASE}/oauth/token`);
    return fetch(`${BASE}/oauth/token`, {
      method: 'POST',
      headers: {
        'Content-Type': 'application/x-www-form-urlencoded',
        DPoP: proof,
      },
      body: new URLSearchParams({
        grant_type: 'authorization_code',
        code,
        client_id: clientId,
        redirect_uri: redirectUri,
        code_verifier: codeVerifier,
      }).toString(),
    });
  }

  it('full PAR -> authorize -> token flow', async () => {
    const dpop = await DpopClient.create();
    const { codeVerifier, codeChallenge } = await createPkcePair();

    // Step 1: PAR request
    const parRes = await pushAuthRequest(dpop, codeChallenge, {
      state: 'test-state',
    });
    assert.strictEqual(parRes.status, 200, 'PAR should succeed');
    const parData = await parRes.json();
    assert.ok(parData.request_uri, 'PAR should return request_uri');
    assert.ok(parData.expires_in > 0, 'PAR should return expires_in');

    // Step 2: Authorization (simulate user consent)
    const authRes = await approveConsent(parData.request_uri);
    assert.strictEqual(authRes.status, 302, 'Authorize should redirect');
    const location = authRes.headers.get('location');
    assert.ok(location, 'Should have Location header');

    const redirectUrl = new URL(location);
    const authCode = redirectUrl.searchParams.get('code');
    assert.ok(authCode, 'Redirect should have code');
    assert.strictEqual(redirectUrl.searchParams.get('state'), 'test-state');
    assert.strictEqual(redirectUrl.searchParams.get('iss'), BASE);

    // Step 3: Token exchange
    const tokenRes = await exchangeCode(dpop, authCode, codeVerifier);
    assert.strictEqual(tokenRes.status, 200, 'Token exchange should succeed');
    const tokenData = await tokenRes.json();
    assert.ok(tokenData.access_token, 'Should return access_token');
    assert.ok(tokenData.refresh_token, 'Should return refresh_token');
    assert.strictEqual(tokenData.token_type, 'DPoP');
    assert.strictEqual(tokenData.scope, 'atproto');
    assert.ok(tokenData.sub, 'Should return sub');

    // Step 4: Use the access token (proof carries ath binding) on a
    // protected endpoint.
    const resourceProof = await dpop.createProof(
      'GET',
      `${BASE}/xrpc/com.atproto.server.getSession`,
      tokenData.access_token,
    );
    const sessionRes = await fetch(
      `${BASE}/xrpc/com.atproto.server.getSession`,
      {
        headers: {
          Authorization: `DPoP ${tokenData.access_token}`,
          DPoP: resourceProof,
        },
      },
    );
    assert.strictEqual(
      sessionRes.status,
      200,
      'Protected endpoint should work with DPoP token',
    );
    const sessionData = await sessionRes.json();
    assert.ok(sessionData.did, 'Should return session data');

    // Step 5: Refresh grant rotates both tokens.
    const refreshProof = await dpop.createProof('POST', `${BASE}/oauth/token`);
    const refreshRes = await fetch(`${BASE}/oauth/token`, {
      method: 'POST',
      headers: {
        'Content-Type': 'application/x-www-form-urlencoded',
        DPoP: refreshProof,
      },
      body: new URLSearchParams({
        grant_type: 'refresh_token',
        refresh_token: tokenData.refresh_token,
        client_id: clientId,
      }).toString(),
    });
    assert.strictEqual(refreshRes.status, 200, 'Refresh should succeed');
    const refreshData = await refreshRes.json();
    assert.ok(refreshData.access_token, 'Should return new access_token');
    assert.ok(refreshData.refresh_token, 'Should return new refresh_token');

    // Step 6: Revoke the refresh token (no DPoP required for revocation).
    const revokeRes = await fetch(`${BASE}/oauth/revoke`, {
      method: 'POST',
      headers: { 'Content-Type': 'application/x-www-form-urlencoded' },
      body: new URLSearchParams({
        token: refreshData.refresh_token,
        client_id: clientId,
      }).toString(),
    });
    assert.strictEqual(revokeRes.status, 200);
  });

  it('DPoP key mismatch rejected', async () => {
    const dpop1 = await DpopClient.create();
    const dpop2 = await DpopClient.create();
    const { codeVerifier, codeChallenge } = await createPkcePair();

    // PAR and consent using the first key.
    const parRes = await pushAuthRequest(dpop1, codeChallenge);
    const parData = await parRes.json();
    const authRes = await approveConsent(parData.request_uri);
    const location = authRes.headers.get('location');
    const authCode = new URL(location).searchParams.get('code');

    // Token exchange signed with a DIFFERENT key must fail: the code is
    // bound to the jkt of the key used at PAR time.
    const tokenRes = await exchangeCode(dpop2, authCode, codeVerifier);
    assert.strictEqual(tokenRes.status, 400);
    const tokenData = await tokenRes.json();
    assert.strictEqual(tokenData.error, 'invalid_dpop_proof');
  });

  it('fragment response_mode returns code in fragment', async () => {
    const dpop = await DpopClient.create();
    const { codeChallenge } = await createPkcePair();

    // PAR with response_mode=fragment.
    const parRes = await pushAuthRequest(dpop, codeChallenge, {
      response_mode: 'fragment',
    });
    const parData = await parRes.json();
    assert.ok(parData.request_uri);

    const authRes = await approveConsent(parData.request_uri);
    assert.strictEqual(authRes.status, 302);
    const location = authRes.headers.get('location');
    assert.ok(location);
    // In fragment mode the code travels in the hash, not the query string.
    assert.ok(location.includes('#'), 'Should use fragment');
    const url = new URL(location);
    const fragment = new URLSearchParams(url.hash.slice(1));
    assert.ok(fragment.get('code'), 'Code should be in fragment');
    assert.ok(fragment.get('iss'), 'Issuer should be in fragment');
  });

  it('PKCE failure - wrong code_verifier rejected', async () => {
    const dpop = await DpopClient.create();
    const { codeChallenge } = await createPkcePair();
    const wrongVerifier = randomBytes(32).toString('base64url');

    const parRes = await pushAuthRequest(dpop, codeChallenge);
    const parData = await parRes.json();
    const authRes = await approveConsent(parData.request_uri);
    const location = authRes.headers.get('location');
    const authCode = new URL(location).searchParams.get('code');

    // Token exchange with a verifier that does not hash to the challenge
    // must be rejected.
    const tokenRes = await exchangeCode(dpop, authCode, wrongVerifier);
    assert.strictEqual(tokenRes.status, 400);
    const tokenData = await tokenRes.json();
    assert.strictEqual(tokenData.error, 'invalid_grant');
    assert.ok(tokenData.message?.includes('code_verifier'));
  });

  it('redirect_uri mismatch rejected', async () => {
    const dpop = await DpopClient.create();
    const { codeChallenge } = await createPkcePair();

    // PAR with a redirect_uri not registered for this client must be
    // rejected before any consent happens.
    const parRes = await pushAuthRequest(dpop, codeChallenge, {
      redirect_uri: 'http://attacker.com/callback',
    });
    assert.strictEqual(parRes.status, 400);
    const parData = await parRes.json();
    assert.strictEqual(parData.error, 'invalid_request');
    assert.ok(parData.message?.includes('redirect_uri'));
  });

  it('DPoP jti replay rejected', async () => {
    const dpop = await DpopClient.create();
    const { codeChallenge } = await createPkcePair();

    // Build one proof and submit it twice.
    const proof = await dpop.createProof('POST', `${BASE}/oauth/par`);

    const firstRes = await pushAuthRequest(dpop, codeChallenge, {}, proof);
    assert.strictEqual(firstRes.status, 200);

    // The second use of the same jti must be detected as a replay.
    const secondRes = await pushAuthRequest(dpop, codeChallenge, {}, proof);
    assert.strictEqual(secondRes.status, 400);
    const data = await secondRes.json();
    assert.strictEqual(data.error, 'invalid_dpop_proof');
    assert.ok(data.message?.includes('replay'));
  });
});
1042
+
1043
+
describe('Scope Enforcement', () => {
  /**
   * Obtain an OAuth token restricted to `scope`, then POST `body` to the
   * given XRPC endpoint with a matching ath-bound DPoP proof.
   * @param {string} scope - Granular scope string to request.
   * @param {string} nsid - XRPC method NSID (appended to /xrpc/).
   * @param {string} contentType - Content-Type header for the request.
   * @param {string|Uint8Array} body - Request body.
   * @returns {Promise<Response>}
   */
  async function postWithScope(scope, nsid, contentType, body) {
    const { accessToken, dpop } = await getOAuthTokenWithScope(
      scope,
      DID,
      PASSWORD,
    );
    const url = `${BASE}/xrpc/${nsid}`;
    const proof = await dpop.createProof('POST', url, accessToken);
    return fetch(url, {
      method: 'POST',
      headers: {
        'Content-Type': contentType,
        Authorization: `DPoP ${accessToken}`,
        DPoP: proof,
      },
      body,
    });
  }

  // Convenience wrapper: scoped createRecord with a JSON body.
  function createRecordWithScope(scope, payload) {
    return postWithScope(
      scope,
      'com.atproto.repo.createRecord',
      'application/json',
      JSON.stringify(payload),
    );
  }

  it('createRecord denied with insufficient scope', async () => {
    // Token only allows creating likes, not posts.
    const res = await createRecordWithScope(
      'repo:app.bsky.feed.like?action=create',
      {
        repo: DID,
        collection: 'app.bsky.feed.post', // Not allowed by scope
        record: { text: 'test', createdAt: new Date().toISOString() },
      },
    );
    assert.strictEqual(res.status, 403, 'Should reject with 403');
    const body = await res.json();
    assert.ok(
      body.message?.includes('Missing required scope'),
      'Error should mention missing scope',
    );
  });

  it('createRecord allowed with matching scope', async () => {
    const res = await createRecordWithScope(
      'repo:app.bsky.feed.post?action=create',
      {
        repo: DID,
        collection: 'app.bsky.feed.post',
        record: { text: 'scope test', createdAt: new Date().toISOString() },
      },
    );
    assert.strictEqual(res.status, 200, 'Should allow with correct scope');
    const body = await res.json();
    assert.ok(body.uri, 'Should return uri');

    // Note: no cleanup here because this token only has create scope.
    // The record is removed by subsequent tests using full-access tokens.
  });

  it('createRecord allowed with wildcard collection scope', async () => {
    // Token allows creating records in any collection.
    const res = await createRecordWithScope('repo:*?action=create', {
      repo: DID,
      collection: 'app.bsky.feed.post',
      record: {
        text: 'wildcard scope test',
        createdAt: new Date().toISOString(),
      },
    });
    assert.strictEqual(
      res.status,
      200,
      'Wildcard scope should allow any collection',
    );
  });

  it('deleteRecord denied without delete scope', async () => {
    // Token only has create scope; the rkey is irrelevant because the
    // request must fail on scope before any record lookup.
    const res = await postWithScope(
      'repo:app.bsky.feed.post?action=create',
      'com.atproto.repo.deleteRecord',
      'application/json',
      JSON.stringify({
        repo: DID,
        collection: 'app.bsky.feed.post',
        rkey: 'nonexistent',
      }),
    );
    assert.strictEqual(
      res.status,
      403,
      'Should reject delete without delete scope',
    );
  });

  it('uploadBlob denied with mismatched MIME scope', async () => {
    // Token only allows image uploads; try a video instead.
    const res = await postWithScope(
      'blob:image/*',
      'com.atproto.repo.uploadBlob',
      'video/mp4',
      new Uint8Array([0x00, 0x00, 0x00, 0x18, 0x66, 0x74, 0x79, 0x70]), // Fake MP4 header
    );
    assert.strictEqual(
      res.status,
      403,
      'Should reject video upload with image-only scope',
    );
    const body = await res.json();
    assert.ok(
      body.message?.includes('Missing required scope'),
      'Error should mention missing scope',
    );
  });

  it('uploadBlob allowed with matching MIME scope', async () => {
    // Minimal valid PNG payload.
    const pngBytes = new Uint8Array([
      0x89, 0x50, 0x4e, 0x47, 0x0d, 0x0a, 0x1a, 0x0a, 0x00, 0x00, 0x00, 0x0d,
      0x49, 0x48, 0x44, 0x52, 0x00, 0x00, 0x00, 0x01, 0x00, 0x00, 0x00, 0x01,
      0x08, 0x06, 0x00, 0x00, 0x00, 0x1f, 0x15, 0xc4, 0x89, 0x00, 0x00, 0x00,
      0x0a, 0x49, 0x44, 0x41, 0x54, 0x78, 0x9c, 0x63, 0x00, 0x01, 0x00, 0x00,
      0x05, 0x00, 0x01, 0x0d, 0x0a, 0x2d, 0xb4, 0x00, 0x00, 0x00, 0x00, 0x49,
      0x45, 0x4e, 0x44, 0xae, 0x42, 0x60, 0x82,
    ]);

    const res = await postWithScope(
      'blob:image/*',
      'com.atproto.repo.uploadBlob',
      'image/png',
      pngBytes,
    );
    assert.strictEqual(
      res.status,
      200,
      'Should allow image upload with image scope',
    );
  });

  it('transition:generic grants full access', async () => {
    // transition:generic is the legacy full-access scope.
    const res = await createRecordWithScope('transition:generic', {
      repo: DID,
      collection: 'app.bsky.feed.post',
      record: {
        text: 'transition scope test',
        createdAt: new Date().toISOString(),
      },
    });
    assert.strictEqual(
      res.status,
      200,
      'transition:generic should grant full access',
    );
  });
});
1305
+
1306
+
describe('Consent page display', () => {
1307
+
it('consent page shows permissions table for granular scopes', async () => {
  const dpop = await DpopClient.create();
  const clientId = 'http://localhost:3000';
  const redirectUri = 'http://localhost:3000/callback';

  // Build a PKCE S256 challenge for the PAR request.
  const codeVerifier = randomBytes(32).toString('base64url');
  const digest = await crypto.subtle.digest(
    'SHA-256',
    new TextEncoder().encode(codeVerifier),
  );
  const codeChallenge = Buffer.from(digest).toString('base64url');

  // Push an authorization request carrying granular repo + blob scopes.
  const parProof = await dpop.createProof('POST', `${BASE}/oauth/par`);
  const parRes = await fetch(`${BASE}/oauth/par`, {
    method: 'POST',
    headers: {
      'Content-Type': 'application/x-www-form-urlencoded',
      DPoP: parProof,
    },
    body: new URLSearchParams({
      client_id: clientId,
      redirect_uri: redirectUri,
      response_type: 'code',
      scope:
        'atproto repo:app.bsky.feed.post?action=create&action=update blob:image/*',
      code_challenge: codeChallenge,
      code_challenge_method: 'S256',
      state: 'test-state',
      login_hint: DID,
    }).toString(),
  });
  assert.strictEqual(parRes.status, 200, 'PAR should succeed');
  const { request_uri } = await parRes.json();

  // Render the consent page for that pushed request.
  const pageUrl = new URL(`${BASE}/oauth/authorize`);
  pageUrl.searchParams.set('client_id', clientId);
  pageUrl.searchParams.set('request_uri', request_uri);
  const authorizeRes = await fetch(pageUrl.toString());
  const html = await authorizeRes.text();

  // The permissions table must enumerate each granted capability.
  assert.ok(
    html.includes('Repository permissions:'),
    'Should show repo permissions section',
  );
  assert.ok(
    html.includes('app.bsky.feed.post'),
    'Should show collection name',
  );
  assert.ok(
    html.includes('Upload permissions:'),
    'Should show upload permissions section',
  );
  assert.ok(html.includes('image/*'), 'Should show blob MIME type');
});
1365
+
1366
+
it('consent page shows identity message for atproto-only scope', async () => {
  const dpop = await DpopClient.create();
  const clientId = 'http://localhost:3000';
  const redirectUri = 'http://localhost:3000/callback';

  // Build a PKCE S256 challenge for the PAR request.
  const codeVerifier = randomBytes(32).toString('base64url');
  const digest = await crypto.subtle.digest(
    'SHA-256',
    new TextEncoder().encode(codeVerifier),
  );
  const codeChallenge = Buffer.from(digest).toString('base64url');

  // Push an authorization request with the bare 'atproto' (identity-only) scope.
  const parProof = await dpop.createProof('POST', `${BASE}/oauth/par`);
  const parRes = await fetch(`${BASE}/oauth/par`, {
    method: 'POST',
    headers: {
      'Content-Type': 'application/x-www-form-urlencoded',
      DPoP: parProof,
    },
    body: new URLSearchParams({
      client_id: clientId,
      redirect_uri: redirectUri,
      response_type: 'code',
      scope: 'atproto',
      code_challenge: codeChallenge,
      code_challenge_method: 'S256',
      state: 'test-state',
      login_hint: DID,
    }).toString(),
  });
  assert.strictEqual(parRes.status, 200, 'PAR should succeed');
  const { request_uri } = await parRes.json();

  // Render the consent page for that pushed request.
  const pageUrl = new URL(`${BASE}/oauth/authorize`);
  pageUrl.searchParams.set('client_id', clientId);
  pageUrl.searchParams.set('request_uri', request_uri);
  const authorizeRes = await fetch(pageUrl.toString());
  const html = await authorizeRes.text();

  // Identity-only consent must not render a permissions table.
  assert.ok(
    html.includes('wants to uniquely identify you'),
    'Should show identity-only message',
  );
  assert.ok(
    !html.includes('Repository permissions:'),
    'Should NOT show permissions table',
  );
});
1418
+
1419
+
it('consent page shows warning for transition:generic scope', async () => {
  const dpop = await DpopClient.create();
  const clientId = 'http://localhost:3000';
  const redirectUri = 'http://localhost:3000/callback';

  // Build a PKCE S256 challenge for the PAR request.
  const codeVerifier = randomBytes(32).toString('base64url');
  const digest = await crypto.subtle.digest(
    'SHA-256',
    new TextEncoder().encode(codeVerifier),
  );
  const codeChallenge = Buffer.from(digest).toString('base64url');

  // Push an authorization request asking for full access.
  const parProof = await dpop.createProof('POST', `${BASE}/oauth/par`);
  const parRes = await fetch(`${BASE}/oauth/par`, {
    method: 'POST',
    headers: {
      'Content-Type': 'application/x-www-form-urlencoded',
      DPoP: parProof,
    },
    body: new URLSearchParams({
      client_id: clientId,
      redirect_uri: redirectUri,
      response_type: 'code',
      scope: 'atproto transition:generic',
      code_challenge: codeChallenge,
      code_challenge_method: 'S256',
      state: 'test-state',
      login_hint: DID,
    }).toString(),
  });
  assert.strictEqual(parRes.status, 200, 'PAR should succeed');
  const { request_uri } = await parRes.json();

  // Render the consent page for that pushed request.
  const pageUrl = new URL(`${BASE}/oauth/authorize`);
  pageUrl.searchParams.set('client_id', clientId);
  pageUrl.searchParams.set('request_uri', request_uri);
  const authorizeRes = await fetch(pageUrl.toString());
  const html = await authorizeRes.text();

  // Full-access requests must carry an explicit warning banner.
  assert.ok(
    html.includes('Full repository access requested'),
    'Should show full access warning',
  );
});
1467
+
1468
+
it('supports direct authorization without PAR', async () => {
  const clientId = 'http://localhost:3000';
  const redirectUri = 'http://localhost:3000/callback';
  const codeVerifier = 'test-verifier-for-direct-auth-flow-min-43-chars!!';
  const digest = await crypto.subtle.digest(
    'SHA-256',
    new TextEncoder().encode(codeVerifier),
  );
  const codeChallenge = Buffer.from(digest).toString('base64url');

  // Hit /oauth/authorize directly with query parameters — no PAR step.
  const authorizeUrl = new URL(`${BASE}/oauth/authorize`);
  const queryParams = {
    client_id: clientId,
    redirect_uri: redirectUri,
    response_type: 'code',
    scope: 'atproto',
    code_challenge: codeChallenge,
    code_challenge_method: 'S256',
    state: 'test-direct-auth-state',
    login_hint: DID,
  };
  for (const [key, value] of Object.entries(queryParams)) {
    authorizeUrl.searchParams.set(key, value);
  }

  const getRes = await fetch(authorizeUrl.toString());
  assert.strictEqual(
    getRes.status,
    200,
    'Direct authorize GET should succeed',
  );

  // The server creates an authorization request on the fly and embeds
  // its request_uri in the consent form.
  const html = await getRes.text();
  assert.ok(html.includes('Authorize'), 'Should show consent page');
  assert.ok(
    html.includes('request_uri'),
    'Should include request_uri in form',
  );
});
1504
+
1505
+
// End-to-end direct-auth flow: authorize (GET) -> approve (POST) -> token.
it('completes full direct authorization flow', async () => {
  const clientId = 'http://localhost:3000';
  const redirectUri = 'http://localhost:3000/callback';
  // PKCE verifier must be >= 43 chars; challenge is base64url(SHA-256(verifier)).
  const codeVerifier = 'test-verifier-for-direct-auth-flow-min-43-chars!!';
  const challengeBuffer = await crypto.subtle.digest(
    'SHA-256',
    new TextEncoder().encode(codeVerifier),
  );
  const codeChallenge = Buffer.from(challengeBuffer).toString('base64url');
  const state = 'test-direct-auth-state';

  // Step 1: GET authorize with direct parameters
  const authorizeUrl = new URL(`${BASE}/oauth/authorize`);
  authorizeUrl.searchParams.set('client_id', clientId);
  authorizeUrl.searchParams.set('redirect_uri', redirectUri);
  authorizeUrl.searchParams.set('response_type', 'code');
  authorizeUrl.searchParams.set('scope', 'atproto');
  authorizeUrl.searchParams.set('code_challenge', codeChallenge);
  authorizeUrl.searchParams.set('code_challenge_method', 'S256');
  authorizeUrl.searchParams.set('state', state);
  authorizeUrl.searchParams.set('login_hint', DID);

  const getRes = await fetch(authorizeUrl.toString());
  assert.strictEqual(getRes.status, 200);
  const html = await getRes.text();

  // Extract request_uri from the form
  // (the server mints one even for direct, non-PAR authorization)
  const requestUriMatch = html.match(/name="request_uri" value="([^"]+)"/);
  assert.ok(requestUriMatch, 'Should have request_uri in form');
  const requestUri = requestUriMatch[1];

  // Step 2: POST to authorize (user approval)
  // redirect: 'manual' so we can inspect the 302 Location ourselves.
  const authRes = await fetch(`${BASE}/oauth/authorize`, {
    method: 'POST',
    headers: { 'Content-Type': 'application/x-www-form-urlencoded' },
    body: new URLSearchParams({
      request_uri: requestUri,
      client_id: clientId,
      password: PASSWORD,
    }).toString(),
    redirect: 'manual',
  });

  assert.strictEqual(authRes.status, 302, 'Should redirect after approval');
  const location = authRes.headers.get('location');
  assert.ok(location, 'Should have Location header');
  const locationUrl = new URL(location);
  const code = locationUrl.searchParams.get('code');
  assert.ok(code, 'Should have authorization code');
  // State must round-trip unchanged (CSRF protection on the client side).
  assert.strictEqual(locationUrl.searchParams.get('state'), state);

  // Step 3: Exchange code for tokens
  // DPoP binding is deferred to token time for direct-auth flows.
  const dpop = await DpopClient.create();
  const dpopProof = await dpop.createProof('POST', `${BASE}/oauth/token`);

  const tokenRes = await fetch(`${BASE}/oauth/token`, {
    method: 'POST',
    headers: {
      'Content-Type': 'application/x-www-form-urlencoded',
      DPoP: dpopProof,
    },
    body: new URLSearchParams({
      grant_type: 'authorization_code',
      code,
      redirect_uri: redirectUri,
      client_id: clientId,
      code_verifier: codeVerifier,
    }).toString(),
  });

  assert.strictEqual(tokenRes.status, 200, 'Token exchange should succeed');
  const tokenData = await tokenRes.json();
  assert.ok(tokenData.access_token, 'Should have access_token');
  // token_type must be 'DPoP' (not 'Bearer') for DPoP-bound tokens.
  assert.strictEqual(tokenData.token_type, 'DPoP');
});
1580
+
1581
+
it('consent page shows profile card when login_hint is provided', async () => {
  // PKCE challenge derived from a fixed verifier (value itself is irrelevant
  // here — the test only inspects the rendered consent HTML).
  const clientId = 'http://localhost:3000';
  const redirectUri = 'http://localhost:3000/callback';
  const codeVerifier = 'test-verifier-for-profile-card-test-min-43-chars!!';
  const digest = await crypto.subtle.digest(
    'SHA-256',
    new TextEncoder().encode(codeVerifier),
  );
  const codeChallenge = Buffer.from(digest).toString('base64url');

  // Direct authorize request carrying a handle in login_hint.
  const authorizeUrl = new URL(`${BASE}/oauth/authorize`);
  const query = {
    client_id: clientId,
    redirect_uri: redirectUri,
    response_type: 'code',
    scope: 'atproto',
    code_challenge: codeChallenge,
    code_challenge_method: 'S256',
    state: 'test-state',
    login_hint: 'test.handle.example',
  };
  for (const [name, value] of Object.entries(query)) {
    authorizeUrl.searchParams.set(name, value);
  }

  const res = await fetch(authorizeUrl.toString());
  const html = await res.text();

  assert.ok(
    html.includes('profile-card'),
    'Should include profile card element',
  );
  assert.ok(
    html.includes('@test.handle.example'),
    'Should show handle with @ prefix',
  );
  assert.ok(
    html.includes('app.bsky.actor.getProfile'),
    'Should include profile fetch script',
  );
});
1617
+
1618
+
it('consent page does not show profile card when login_hint is omitted', async () => {
  const clientId = 'http://localhost:3000';
  const redirectUri = 'http://localhost:3000/callback';
  const codeVerifier = 'test-verifier-for-no-profile-test-min-43-chars!!';
  const digest = await crypto.subtle.digest(
    'SHA-256',
    new TextEncoder().encode(codeVerifier),
  );
  const codeChallenge = Buffer.from(digest).toString('base64url');

  // Same direct authorize request as the positive case, but deliberately
  // without a login_hint parameter.
  const authorizeUrl = new URL(`${BASE}/oauth/authorize`);
  const query = {
    client_id: clientId,
    redirect_uri: redirectUri,
    response_type: 'code',
    scope: 'atproto',
    code_challenge: codeChallenge,
    code_challenge_method: 'S256',
    state: 'test-state',
  };
  for (const [name, value] of Object.entries(query)) {
    authorizeUrl.searchParams.set(name, value);
  }

  const res = await fetch(authorizeUrl.toString());
  const html = await res.text();

  // Check for the actual element (id="profile-card"), not the CSS class selector
  assert.ok(
    !html.includes('id="profile-card"'),
    'Should NOT include profile card element',
  );
  assert.ok(
    !html.includes('app.bsky.actor.getProfile'),
    'Should NOT include profile fetch script',
  );
});
1651
+
1652
+
it('consent page escapes dangerous characters in login_hint', async () => {
  const clientId = 'http://localhost:3000';
  const redirectUri = 'http://localhost:3000/callback';
  const codeVerifier = 'test-verifier-for-xss-test-minimum-43-chars!!!!!';
  const digest = await crypto.subtle.digest(
    'SHA-256',
    new TextEncoder().encode(codeVerifier),
  );
  const codeChallenge = Buffer.from(digest).toString('base64url');

  // Attempt XSS via login_hint with double quotes to break out of JSON.stringify
  const maliciousHint = 'user");alert("xss';

  const authorizeUrl = new URL(`${BASE}/oauth/authorize`);
  const query = {
    client_id: clientId,
    redirect_uri: redirectUri,
    response_type: 'code',
    scope: 'atproto',
    code_challenge: codeChallenge,
    code_challenge_method: 'S256',
    state: 'test-state',
    login_hint: maliciousHint,
  };
  for (const [name, value] of Object.entries(query)) {
    authorizeUrl.searchParams.set(name, value);
  }

  const res = await fetch(authorizeUrl.toString());
  const html = await res.text();

  // JSON.stringify escapes double quotes, so the raw breakout sequence
  // ");alert(" must never appear verbatim in the rendered page.
  assert.ok(
    !html.includes('");alert("'),
    'Should escape double quotes to prevent XSS breakout',
  );
  // And the escaped form (backslash before the quote) should be present.
  assert.ok(
    html.includes('\\"'),
    'Should contain escaped characters from JSON.stringify',
  );
});
1690
+
});
1691
+
1692
+
// E2E coverage of the `atproto-proxy` header: requests for foreign DIDs are
// forwarded to the named service (e.g. the Bluesky AppView) when the header is
// present, handled locally when it is absent, and rejected when the header is
// malformed or names an unknown service. These tests hit the live AppView, so
// status assertions are deliberately tolerant of record-state drift.
describe('Foreign DID proxying', () => {
  it('proxies to AppView when atproto-proxy header present', async () => {
    // Use a known public DID (bsky.app official account)
    // We expect 200 (record exists) or 400 (record deleted/not found) from AppView
    // A 502 would indicate proxy failure, 404 would indicate local handling
    const res = await fetch(
      `${BASE}/xrpc/com.atproto.repo.getRecord?repo=did:plc:z72i7hdynmk6r22z27h6tvur&collection=app.bsky.feed.post&rkey=3juzlwllznd24`,
      {
        headers: {
          'atproto-proxy': 'did:web:api.bsky.app#bsky_appview',
        },
      },
    );
    // AppView returns 200 (found) or 400 (RecordNotFound), not 404 or 502
    assert.ok(
      res.status === 200 || res.status === 400,
      `Expected 200 or 400 from AppView, got ${res.status}`,
    );
    // Verify we got a JSON response (not an error page)
    const contentType = res.headers.get('content-type');
    assert.ok(
      contentType?.includes('application/json'),
      'Should return JSON',
    );
  });

  it('handles foreign repo locally without header (returns not found)', async () => {
    // Foreign DID without atproto-proxy header is handled locally
    // This returns an error since the foreign DID doesn't exist on this PDS
    const res = await fetch(
      `${BASE}/xrpc/com.atproto.repo.getRecord?repo=did:plc:z72i7hdynmk6r22z27h6tvur&collection=app.bsky.feed.post&rkey=3juzlwllznd24`,
    );
    // Local PDS returns 404 for non-existent record/DID
    assert.strictEqual(res.status, 404);
  });

  it('returns error for unknown proxy service', async () => {
    // A syntactically valid header naming a service this PDS has no URL
    // mapping for must be rejected, not silently handled locally.
    const res = await fetch(
      `${BASE}/xrpc/com.atproto.repo.getRecord?repo=did:plc:test&collection=test&rkey=test`,
      {
        headers: {
          'atproto-proxy': 'did:web:unknown.service#unknown',
        },
      },
    );
    assert.strictEqual(res.status, 400);
    const data = await res.json();
    assert.ok(data.message.includes('Unknown proxy service'));
  });

  it('returns error for malformed atproto-proxy header', async () => {
    // Header without fragment separator
    const res1 = await fetch(
      `${BASE}/xrpc/com.atproto.repo.getRecord?repo=did:plc:test&collection=test&rkey=test`,
      {
        headers: {
          'atproto-proxy': 'did:web:api.bsky.app', // missing #serviceId
        },
      },
    );
    assert.strictEqual(res1.status, 400);
    const data1 = await res1.json();
    assert.ok(data1.message.includes('Malformed atproto-proxy header'));

    // Header with only fragment
    const res2 = await fetch(
      `${BASE}/xrpc/com.atproto.repo.getRecord?repo=did:plc:test&collection=test&rkey=test`,
      {
        headers: {
          'atproto-proxy': '#bsky_appview', // missing DID
        },
      },
    );
    assert.strictEqual(res2.status, 400);
    const data2 = await res2.json();
    assert.ok(data2.message.includes('Malformed atproto-proxy header'));
  });

  it('returns local record for local DID without proxy header', async () => {
    // Create a record first
    const { data: created } = await jsonPost(
      '/xrpc/com.atproto.repo.createRecord',
      {
        repo: DID,
        collection: 'app.bsky.feed.post',
        record: {
          $type: 'app.bsky.feed.post',
          text: 'Test post for local DID test',
          createdAt: new Date().toISOString(),
        },
      },
      { Authorization: `Bearer ${token}` },
    );

    // Fetch without proxy header - should get local record
    // (rkey is the final path segment of the returned at:// URI)
    const rkey = created.uri.split('/').pop();
    const res = await fetch(
      `${BASE}/xrpc/com.atproto.repo.getRecord?repo=${DID}&collection=app.bsky.feed.post&rkey=${rkey}`,
    );
    assert.strictEqual(res.status, 200);
    const data = await res.json();
    assert.ok(data.value.text.includes('Test post for local DID test'));

    // Cleanup - verify success to ensure test isolation
    const { status: cleanupStatus } = await jsonPost(
      '/xrpc/com.atproto.repo.deleteRecord',
      { repo: DID, collection: 'app.bsky.feed.post', rkey },
      { Authorization: `Bearer ${token}` },
    );
    assert.strictEqual(cleanupStatus, 200, 'Cleanup should succeed');
  });

  it('describeRepo handles foreign DID locally', async () => {
    // Without proxy header, foreign DID is handled locally (returns error)
    const res = await fetch(
      `${BASE}/xrpc/com.atproto.repo.describeRepo?repo=did:plc:z72i7hdynmk6r22z27h6tvur`,
    );
    // Local PDS returns 404 for non-existent DID
    assert.strictEqual(res.status, 404);
  });

  it('listRecords handles foreign DID locally', async () => {
    // Without proxy header, foreign DID is handled locally
    // listRecords returns 200 with empty records for non-existent collection
    const res = await fetch(
      `${BASE}/xrpc/com.atproto.repo.listRecords?repo=did:plc:z72i7hdynmk6r22z27h6tvur&collection=app.bsky.feed.post&limit=1`,
    );
    // Local PDS returns 200 with empty records (or 404 for completely unknown DID)
    assert.ok(
      res.status === 200 || res.status === 404,
      `Expected 200 or 404, got ${res.status}`,
    );
  });
});
1826
+
1827
+
describe('Cleanup', () => {
  it('deleteRecord (cleanup)', async () => {
    // Remove the post created by earlier tests so repeated runs start clean.
    const response = await jsonPost(
      '/xrpc/com.atproto.repo.deleteRecord',
      { repo: DID, collection: 'app.bsky.feed.post', rkey: testRkey },
      { Authorization: `Bearer ${token}` },
    );
    assert.strictEqual(response.status, 200);
  });
});
1837
+
});
+114
test/helpers/dpop.js
+114
test/helpers/dpop.js
···
1
+
/**
2
+
* DPoP proof generation for e2e tests
3
+
*/
4
+
5
+
import { base64UrlEncode, computeJwkThumbprint } from '../../src/pds.js';
6
+
7
+
/**
8
+
* Generate an ES256 key pair for DPoP
9
+
* @returns {Promise<{privateKey: CryptoKey, publicKey: CryptoKey, jwk: object}>}
10
+
*/
11
+
export async function generateKeyPair() {
12
+
const keyPair = await crypto.subtle.generateKey(
13
+
{ name: 'ECDSA', namedCurve: 'P-256' },
14
+
true,
15
+
['sign', 'verify'],
16
+
);
17
+
18
+
const jwk = await crypto.subtle.exportKey('jwk', keyPair.publicKey);
19
+
const publicJwk = { kty: jwk.kty, crv: jwk.crv, x: jwk.x, y: jwk.y };
20
+
21
+
return {
22
+
privateKey: keyPair.privateKey,
23
+
publicKey: keyPair.publicKey,
24
+
jwk: publicJwk,
25
+
};
26
+
}
27
+
28
+
/**
 * Build and sign a DPoP proof JWT (ES256).
 * @param {object} params
 * @param {CryptoKey} params.privateKey - Signing key for the proof.
 * @param {object} params.jwk - Public JWK embedded in the JWT header.
 * @param {string} params.method - HTTP method the proof covers (htm).
 * @param {string} params.url - Request URL the proof covers (htu).
 * @param {string} [params.accessToken] - When set, adds an `ath` claim
 *   (base64url SHA-256 of the token) binding the proof to that token.
 * @returns {Promise<string>} Compact JWS: header.payload.signature.
 */
export async function createDpopProof({
  privateKey,
  jwk,
  method,
  url,
  accessToken,
}) {
  const encoder = new TextEncoder();
  const encodeSegment = (value) =>
    base64UrlEncode(encoder.encode(JSON.stringify(value)));

  const claims = {
    jti: base64UrlEncode(crypto.getRandomValues(new Uint8Array(16))),
    htm: method,
    htu: url,
    iat: Math.floor(Date.now() / 1000),
  };

  // Token-bound proofs carry `ath`: base64url(SHA-256(access token)).
  if (accessToken) {
    const digest = await crypto.subtle.digest(
      'SHA-256',
      encoder.encode(accessToken),
    );
    claims.ath = base64UrlEncode(new Uint8Array(digest));
  }

  const headerSegment = encodeSegment({ typ: 'dpop+jwt', alg: 'ES256', jwk });
  const payloadSegment = encodeSegment(claims);
  const signingInput = `${headerSegment}.${payloadSegment}`;

  const signature = await crypto.subtle.sign(
    { name: 'ECDSA', hash: 'SHA-256' },
    privateKey,
    encoder.encode(signingInput),
  );

  return `${signingInput}.${base64UrlEncode(new Uint8Array(signature))}`;
}
77
+
78
+
/**
 * Stateful DPoP helper: holds one ES256 key pair and mints proof JWTs bound
 * to it. Construct via the async factory `DpopClient.create()`.
 */
export class DpopClient {
  #privateKey;
  #jwk;
  #jkt = null; // memoized JWK thumbprint

  constructor(privateKey, jwk) {
    this.#privateKey = privateKey;
    this.#jwk = jwk;
  }

  /** Async factory: generates a fresh P-256 key pair for this client. */
  static async create() {
    const pair = await generateKeyPair();
    return new DpopClient(pair.privateKey, pair.jwk);
  }

  /** Lazily computed, cached JWK thumbprint of this client's public key. */
  async getJkt() {
    if (this.#jkt === null) {
      this.#jkt = await computeJwkThumbprint(this.#jwk);
    }
    return this.#jkt;
  }

  /** Public JWK for this client's key pair. */
  getJwk() {
    return this.#jwk;
  }

  /** Sign a DPoP proof for one request; pass accessToken to token-bind it. */
  async createProof(method, url, accessToken) {
    const proofParams = {
      privateKey: this.#privateKey,
      jwk: this.#jwk,
      method,
      url,
      accessToken,
    };
    return createDpopProof(proofParams);
  }
}
+157
test/helpers/oauth.js
+157
test/helpers/oauth.js
···
1
+
/**
2
+
* OAuth flow helpers for e2e tests
3
+
*/
4
+
5
+
import { randomBytes } from 'node:crypto';
6
+
import { DpopClient } from './dpop.js';
7
+
8
+
const BASE = 'http://localhost:8787';
9
+
10
+
/**
11
+
* Fetch with retry for flaky wrangler dev
12
+
* @param {string} url
13
+
* @param {RequestInit} options
14
+
* @param {number} maxAttempts
15
+
* @returns {Promise<Response>}
16
+
*/
17
+
async function fetchWithRetry(url, options, maxAttempts = 3) {
18
+
let lastError;
19
+
for (let attempt = 0; attempt < maxAttempts; attempt++) {
20
+
try {
21
+
const res = await fetch(url, options);
22
+
// Check if we got an HTML error page instead of expected response
23
+
const contentType = res.headers.get('content-type') || '';
24
+
if (!res.ok && contentType.includes('text/html')) {
25
+
// Wrangler dev error page - retry
26
+
if (attempt < maxAttempts - 1) {
27
+
await new Promise((r) => setTimeout(r, 100 * (attempt + 1)));
28
+
continue;
29
+
}
30
+
}
31
+
return res;
32
+
} catch (err) {
33
+
lastError = err;
34
+
if (attempt < maxAttempts - 1) {
35
+
await new Promise((r) => setTimeout(r, 100 * (attempt + 1)));
36
+
}
37
+
}
38
+
}
39
+
throw lastError || new Error('Fetch failed after retries');
40
+
}
41
+
42
+
/**
 * Get an OAuth token with a specific scope via full PAR -> authorize -> token flow.
 *
 * Each of the three stages is retried up to 3 times with linear backoff to
 * tolerate flaky wrangler dev restarts. All three stages share one DPoP key
 * (the returned DpopClient), as required for token binding.
 *
 * @param {string} scope - The scope to request
 * @param {string} did - The DID to authenticate as
 * @param {string} password - The password for authentication
 * @returns {Promise<{accessToken: string, refreshToken: string, dpop: DpopClient}>}
 * @throws {Error} if any stage still fails after all retries
 */
export async function getOAuthTokenWithScope(scope, did, password) {
  const dpop = await DpopClient.create();
  const clientId = 'http://localhost:3000';
  const redirectUri = 'http://localhost:3000/callback';
  // PKCE: random verifier, S256 challenge = base64url(SHA-256(verifier)).
  const codeVerifier = randomBytes(32).toString('base64url');
  const challengeBuffer = await crypto.subtle.digest(
    'SHA-256',
    new TextEncoder().encode(codeVerifier),
  );
  const codeChallenge = Buffer.from(challengeBuffer).toString('base64url');

  // PAR request (with retry for flaky wrangler dev)
  let parData;
  for (let attempt = 0; attempt < 3; attempt++) {
    // Generate fresh DPoP proof for each attempt
    // (each proof has a unique jti; reusing one could be rejected as replay)
    const parProof = await dpop.createProof('POST', `${BASE}/oauth/par`);
    const parRes = await fetchWithRetry(`${BASE}/oauth/par`, {
      method: 'POST',
      headers: {
        'Content-Type': 'application/x-www-form-urlencoded',
        DPoP: parProof,
      },
      body: new URLSearchParams({
        client_id: clientId,
        redirect_uri: redirectUri,
        response_type: 'code',
        scope: scope,
        code_challenge: codeChallenge,
        code_challenge_method: 'S256',
        login_hint: did,
      }).toString(),
    });
    if (parRes.ok) {
      parData = await parRes.json();
      break;
    }
    if (attempt < 2) {
      await new Promise((r) => setTimeout(r, 100 * (attempt + 1)));
    } else {
      const text = await parRes.text();
      throw new Error(
        `PAR request failed: ${parRes.status} - ${text.slice(0, 100)}`,
      );
    }
  }

  // Authorize (with retry)
  // POST approval with the PAR request_uri; the code comes back in the
  // redirect Location, so follow redirects manually.
  let authCode;
  for (let attempt = 0; attempt < 3; attempt++) {
    const authRes = await fetchWithRetry(`${BASE}/oauth/authorize`, {
      method: 'POST',
      headers: { 'Content-Type': 'application/x-www-form-urlencoded' },
      body: new URLSearchParams({
        request_uri: parData.request_uri,
        client_id: clientId,
        password: password,
      }).toString(),
      redirect: 'manual',
    });
    const location = authRes.headers.get('location');
    if (location) {
      authCode = new URL(location).searchParams.get('code');
      if (authCode) break;
    }
    if (attempt < 2) {
      await new Promise((r) => setTimeout(r, 100 * (attempt + 1)));
    } else {
      throw new Error('Authorize request failed to return code');
    }
  }

  // Token exchange (with retry and fresh DPoP proof)
  let tokenData;
  for (let attempt = 0; attempt < 3; attempt++) {
    const tokenProof = await dpop.createProof('POST', `${BASE}/oauth/token`);
    const tokenRes = await fetchWithRetry(`${BASE}/oauth/token`, {
      method: 'POST',
      headers: {
        'Content-Type': 'application/x-www-form-urlencoded',
        DPoP: tokenProof,
      },
      body: new URLSearchParams({
        grant_type: 'authorization_code',
        code: authCode,
        client_id: clientId,
        redirect_uri: redirectUri,
        code_verifier: codeVerifier,
      }).toString(),
    });
    if (tokenRes.ok) {
      tokenData = await tokenRes.json();
      break;
    }
    if (attempt < 2) {
      await new Promise((r) => setTimeout(r, 100 * (attempt + 1)));
    } else {
      const text = await tokenRes.text();
      throw new Error(
        `Token request failed: ${tokenRes.status} - ${text.slice(0, 100)}`,
      );
    }
  }

  // Return the DpopClient too: subsequent resource requests must be signed
  // with the same key the tokens are bound to.
  return {
    accessToken: tokenData.access_token,
    refreshToken: tokenData.refresh_token,
    dpop,
  };
}
+1102
-221
test/pds.test.js
+1102
-221
test/pds.test.js
···
1
-
import { test, describe } from 'node:test'
2
-
import assert from 'node:assert'
1
+
import assert from 'node:assert';
2
+
import { describe, test } from 'node:test';
3
3
import {
4
-
cborEncode, cborDecode, createCid, cidToString, cidToBytes, base32Encode, createTid,
5
-
generateKeyPair, importPrivateKey, sign, bytesToHex, hexToBytes,
6
-
getKeyDepth, varint, base32Decode, buildCarFile
7
-
} from '../src/pds.js'
4
+
base32Decode,
5
+
base32Encode,
6
+
base64UrlDecode,
7
+
base64UrlEncode,
8
+
buildCarFile,
9
+
bytesToHex,
10
+
cborDecode,
11
+
cborEncode,
12
+
cidToString,
13
+
computeJwkThumbprint,
14
+
createAccessJwt,
15
+
createBlobCid,
16
+
createCid,
17
+
createRefreshJwt,
18
+
createTid,
19
+
findBlobRefs,
20
+
generateKeyPair,
21
+
getKeyDepth,
22
+
getKnownServiceUrl,
23
+
getLoopbackClientMetadata,
24
+
hexToBytes,
25
+
importPrivateKey,
26
+
isLoopbackClient,
27
+
matchesMime,
28
+
parseAtprotoProxyHeader,
29
+
parseBlobScope,
30
+
parseRepoScope,
31
+
parseScopesForDisplay,
32
+
ScopePermissions,
33
+
sign,
34
+
sniffMimeType,
35
+
validateClientMetadata,
36
+
varint,
37
+
verifyAccessJwt,
38
+
verifyRefreshJwt,
39
+
} from '../src/pds.js';
40
+
41
+
// Internal constant - not exported from pds.js due to Cloudflare Workers limitation
42
+
const BSKY_APPVIEW_URL = 'https://api.bsky.app';
8
43
9
44
// Unit tests for the deterministic DAG-CBOR encoder (cborEncode/cborDecode).
// Expected byte sequences follow RFC 8949 major types; map keys must sort
// deterministically for repo MST hashing to be stable.
describe('CBOR Encoding', () => {
  test('encodes simple map', () => {
    const encoded = cborEncode({ hello: 'world', num: 42 });
    // Expected: a2 65 68 65 6c 6c 6f 65 77 6f 72 6c 64 63 6e 75 6d 18 2a
    const expected = new Uint8Array([
      0xa2, 0x65, 0x68, 0x65, 0x6c, 0x6c, 0x6f, 0x65, 0x77, 0x6f, 0x72, 0x6c,
      0x64, 0x63, 0x6e, 0x75, 0x6d, 0x18, 0x2a,
    ]);
    assert.deepStrictEqual(encoded, expected);
  });

  test('encodes null', () => {
    const encoded = cborEncode(null);
    // 0xf6 = null (major type 7)
    assert.deepStrictEqual(encoded, new Uint8Array([0xf6]));
  });

  test('encodes booleans', () => {
    assert.deepStrictEqual(cborEncode(true), new Uint8Array([0xf5]));
    assert.deepStrictEqual(cborEncode(false), new Uint8Array([0xf4]));
  });

  test('encodes small integers', () => {
    // Integers 0-23 encode as a single byte (immediate value)
    assert.deepStrictEqual(cborEncode(0), new Uint8Array([0x00]));
    assert.deepStrictEqual(cborEncode(1), new Uint8Array([0x01]));
    assert.deepStrictEqual(cborEncode(23), new Uint8Array([0x17]));
  });

  test('encodes integers >= 24', () => {
    // 0x18 = one-byte uint follows
    assert.deepStrictEqual(cborEncode(24), new Uint8Array([0x18, 0x18]));
    assert.deepStrictEqual(cborEncode(255), new Uint8Array([0x18, 0xff]));
  });

  test('encodes negative integers', () => {
    // Major type 1: value encodes -(n+1)
    assert.deepStrictEqual(cborEncode(-1), new Uint8Array([0x20]));
    assert.deepStrictEqual(cborEncode(-10), new Uint8Array([0x29]));
  });

  test('encodes strings', () => {
    const encoded = cborEncode('hello');
    // 0x65 = text string of length 5
    assert.deepStrictEqual(
      encoded,
      new Uint8Array([0x65, 0x68, 0x65, 0x6c, 0x6c, 0x6f]),
    );
  });

  test('encodes byte strings', () => {
    const bytes = new Uint8Array([1, 2, 3]);
    const encoded = cborEncode(bytes);
    // 0x43 = byte string of length 3
    assert.deepStrictEqual(encoded, new Uint8Array([0x43, 1, 2, 3]));
  });

  test('encodes arrays', () => {
    const encoded = cborEncode([1, 2, 3]);
    // 0x83 = array of length 3
    assert.deepStrictEqual(encoded, new Uint8Array([0x83, 0x01, 0x02, 0x03]));
  });

  test('sorts map keys deterministically', () => {
    const encoded1 = cborEncode({ z: 1, a: 2 });
    const encoded2 = cborEncode({ a: 2, z: 1 });
    assert.deepStrictEqual(encoded1, encoded2);
    // First key should be 'a' (0x61)
    assert.strictEqual(encoded1[1], 0x61);
  });

  test('encodes large integers >= 2^31 without overflow', () => {
    // 2^31 would overflow with bitshift operators (treated as signed 32-bit)
    const twoTo31 = 2147483648;
    const encoded = cborEncode(twoTo31);
    const decoded = cborDecode(encoded);
    assert.strictEqual(decoded, twoTo31);

    // 2^32 - 1 (max unsigned 32-bit)
    const maxU32 = 4294967295;
    const encoded2 = cborEncode(maxU32);
    const decoded2 = cborDecode(encoded2);
    assert.strictEqual(decoded2, maxU32);
  });

  test('encodes 2^31 with correct byte format', () => {
    // 2147483648 = 0x80000000
    // CBOR: major type 0 (unsigned int), additional info 26 (4-byte follows)
    const encoded = cborEncode(2147483648);
    assert.strictEqual(encoded[0], 0x1a); // type 0 | info 26
    assert.strictEqual(encoded[1], 0x80);
    assert.strictEqual(encoded[2], 0x00);
    assert.strictEqual(encoded[3], 0x00);
    assert.strictEqual(encoded[4], 0x00);
  });
});
98
136
99
137
// base32Encode produces the lowercase, unpadded base32 used by CID strings.
describe('Base32 Encoding', () => {
  test('encodes bytes to base32lower', () => {
    const bytes = new Uint8Array([0x01, 0x71, 0x12, 0x20]);
    const encoded = base32Encode(bytes);
    assert.strictEqual(typeof encoded, 'string');
    // Alphabet is a-z plus digits 2-7, no '=' padding
    assert.match(encoded, /^[a-z2-7]+$/);
  });
});
107
145
108
146
// CID layout under test: [version, codec, hash-fn, hash-len, 32-byte digest].
// createCid wraps dag-cbor (0x71) content; createBlobCid wraps raw (0x55)
// blob bytes; both use sha-256 (0x12, length 0x20).
describe('CID Generation', () => {
  test('createCid uses dag-cbor codec', async () => {
    const data = cborEncode({ test: 'data' });
    const cid = await createCid(data);

    assert.strictEqual(cid.length, 36); // 2 prefix + 2 multihash header + 32 hash
    assert.strictEqual(cid[0], 0x01); // CIDv1
    assert.strictEqual(cid[1], 0x71); // dag-cbor
    assert.strictEqual(cid[2], 0x12); // sha-256
    assert.strictEqual(cid[3], 0x20); // 32 bytes
  });

  test('createBlobCid uses raw codec', async () => {
    const data = new Uint8Array([0xff, 0xd8, 0xff, 0xe0]); // JPEG magic bytes
    const cid = await createBlobCid(data);

    assert.strictEqual(cid.length, 36);
    assert.strictEqual(cid[0], 0x01); // CIDv1
    assert.strictEqual(cid[1], 0x55); // raw codec
    assert.strictEqual(cid[2], 0x12); // sha-256
    assert.strictEqual(cid[3], 0x20); // 32 bytes
  });

  test('same bytes produce different CIDs with different codecs', async () => {
    // The codec byte participates in the CID, so identical content hashed
    // under dag-cbor vs raw must yield distinct identifiers.
    const data = new Uint8Array([1, 2, 3, 4]);
    const dagCborCid = cidToString(await createCid(data));
    const rawCid = cidToString(await createBlobCid(data));

    assert.notStrictEqual(dagCborCid, rawCid);
  });

  test('cidToString returns base32lower with b prefix', async () => {
    const data = cborEncode({ test: 'data' });
    const cid = await createCid(data);
    const cidStr = cidToString(cid);

    // 'b' is the multibase prefix for base32lower
    assert.strictEqual(cidStr[0], 'b');
    assert.match(cidStr, /^b[a-z2-7]+$/);
  });

  test('same input produces same CID', async () => {
    const data1 = cborEncode({ test: 'data' });
    const data2 = cborEncode({ test: 'data' });
    const cid1 = cidToString(await createCid(data1));
    const cid2 = cidToString(await createCid(data2));

    assert.strictEqual(cid1, cid2);
  });

  test('different input produces different CID', async () => {
    const cid1 = cidToString(await createCid(cborEncode({ a: 1 })));
    const cid2 = cidToString(await createCid(cborEncode({ a: 2 })));

    assert.notStrictEqual(cid1, cid2);
  });
});
202
146
203
describe('TID Generation', () => {
147
204
test('creates 13-character TIDs', () => {
148
-
const tid = createTid()
149
-
assert.strictEqual(tid.length, 13)
150
-
})
205
+
const tid = createTid();
206
+
assert.strictEqual(tid.length, 13);
207
+
});
151
208
152
209
test('uses valid base32-sort characters', () => {
153
-
const tid = createTid()
154
-
assert.match(tid, /^[234567abcdefghijklmnopqrstuvwxyz]+$/)
155
-
})
210
+
const tid = createTid();
211
+
assert.match(tid, /^[234567abcdefghijklmnopqrstuvwxyz]+$/);
212
+
});
156
213
157
214
test('generates monotonically increasing TIDs', () => {
158
-
const tid1 = createTid()
159
-
const tid2 = createTid()
160
-
const tid3 = createTid()
215
+
const tid1 = createTid();
216
+
const tid2 = createTid();
217
+
const tid3 = createTid();
161
218
162
-
assert.ok(tid1 < tid2, `${tid1} should be less than ${tid2}`)
163
-
assert.ok(tid2 < tid3, `${tid2} should be less than ${tid3}`)
164
-
})
219
+
assert.ok(tid1 < tid2, `${tid1} should be less than ${tid2}`);
220
+
assert.ok(tid2 < tid3, `${tid2} should be less than ${tid3}`);
221
+
});
165
222
166
223
test('generates unique TIDs', () => {
167
-
const tids = new Set()
224
+
const tids = new Set();
168
225
for (let i = 0; i < 100; i++) {
169
-
tids.add(createTid())
226
+
tids.add(createTid());
170
227
}
171
-
assert.strictEqual(tids.size, 100)
172
-
})
173
-
})
228
+
assert.strictEqual(tids.size, 100);
229
+
});
230
+
});
174
231
175
232
describe('P-256 Signing', () => {
176
233
test('generates key pair with correct sizes', async () => {
177
-
const kp = await generateKeyPair()
234
+
const kp = await generateKeyPair();
178
235
179
-
assert.strictEqual(kp.privateKey.length, 32)
180
-
assert.strictEqual(kp.publicKey.length, 33) // compressed
181
-
assert.ok(kp.publicKey[0] === 0x02 || kp.publicKey[0] === 0x03)
182
-
})
236
+
assert.strictEqual(kp.privateKey.length, 32);
237
+
assert.strictEqual(kp.publicKey.length, 33); // compressed
238
+
assert.ok(kp.publicKey[0] === 0x02 || kp.publicKey[0] === 0x03);
239
+
});
183
240
184
241
test('can sign data with generated key', async () => {
185
-
const kp = await generateKeyPair()
186
-
const key = await importPrivateKey(kp.privateKey)
187
-
const data = new TextEncoder().encode('test message')
188
-
const sig = await sign(key, data)
242
+
const kp = await generateKeyPair();
243
+
const key = await importPrivateKey(kp.privateKey);
244
+
const data = new TextEncoder().encode('test message');
245
+
const sig = await sign(key, data);
189
246
190
-
assert.strictEqual(sig.length, 64) // r (32) + s (32)
191
-
})
247
+
assert.strictEqual(sig.length, 64); // r (32) + s (32)
248
+
});
192
249
193
250
test('different messages produce different signatures', async () => {
194
-
const kp = await generateKeyPair()
195
-
const key = await importPrivateKey(kp.privateKey)
251
+
const kp = await generateKeyPair();
252
+
const key = await importPrivateKey(kp.privateKey);
196
253
197
-
const sig1 = await sign(key, new TextEncoder().encode('message 1'))
198
-
const sig2 = await sign(key, new TextEncoder().encode('message 2'))
254
+
const sig1 = await sign(key, new TextEncoder().encode('message 1'));
255
+
const sig2 = await sign(key, new TextEncoder().encode('message 2'));
199
256
200
-
assert.notDeepStrictEqual(sig1, sig2)
201
-
})
257
+
assert.notDeepStrictEqual(sig1, sig2);
258
+
});
202
259
203
260
test('bytesToHex and hexToBytes roundtrip', () => {
204
-
const original = new Uint8Array([0x00, 0x0f, 0xf0, 0xff, 0xab, 0xcd])
205
-
const hex = bytesToHex(original)
206
-
const back = hexToBytes(hex)
261
+
const original = new Uint8Array([0x00, 0x0f, 0xf0, 0xff, 0xab, 0xcd]);
262
+
const hex = bytesToHex(original);
263
+
const back = hexToBytes(hex);
207
264
208
-
assert.strictEqual(hex, '000ff0ffabcd')
209
-
assert.deepStrictEqual(back, original)
210
-
})
265
+
assert.strictEqual(hex, '000ff0ffabcd');
266
+
assert.deepStrictEqual(back, original);
267
+
});
211
268
212
269
test('importPrivateKey rejects invalid key lengths', async () => {
213
270
// Too short
214
271
await assert.rejects(
215
272
() => importPrivateKey(new Uint8Array(31)),
216
-
/expected 32 bytes, got 31/
217
-
)
273
+
/expected 32 bytes, got 31/,
274
+
);
218
275
219
276
// Too long
220
277
await assert.rejects(
221
278
() => importPrivateKey(new Uint8Array(33)),
222
-
/expected 32 bytes, got 33/
223
-
)
279
+
/expected 32 bytes, got 33/,
280
+
);
224
281
225
282
// Empty
226
283
await assert.rejects(
227
284
() => importPrivateKey(new Uint8Array(0)),
228
-
/expected 32 bytes, got 0/
229
-
)
230
-
})
285
+
/expected 32 bytes, got 0/,
286
+
);
287
+
});
231
288
232
289
test('importPrivateKey rejects non-Uint8Array input', async () => {
233
290
// Arrays have .length but aren't Uint8Array
234
291
await assert.rejects(
235
292
() => importPrivateKey([1, 2, 3]),
236
-
/Invalid private key/
237
-
)
293
+
/Invalid private key/,
294
+
);
238
295
239
296
// Strings don't work either
240
297
await assert.rejects(
241
298
() => importPrivateKey('not bytes'),
242
-
/Invalid private key/
243
-
)
299
+
/Invalid private key/,
300
+
);
244
301
245
302
// null/undefined
246
-
await assert.rejects(
247
-
() => importPrivateKey(null),
248
-
/Invalid private key/
249
-
)
250
-
})
251
-
})
303
+
await assert.rejects(() => importPrivateKey(null), /Invalid private key/);
304
+
});
305
+
});
252
306
253
307
describe('MST Key Depth', () => {
254
308
test('returns a non-negative integer', async () => {
255
-
const depth = await getKeyDepth('app.bsky.feed.post/abc123')
256
-
assert.strictEqual(typeof depth, 'number')
257
-
assert.ok(depth >= 0)
258
-
})
309
+
const depth = await getKeyDepth('app.bsky.feed.post/abc123');
310
+
assert.strictEqual(typeof depth, 'number');
311
+
assert.ok(depth >= 0);
312
+
});
259
313
260
314
test('is deterministic for same key', async () => {
261
-
const key = 'app.bsky.feed.post/test123'
262
-
const depth1 = await getKeyDepth(key)
263
-
const depth2 = await getKeyDepth(key)
264
-
assert.strictEqual(depth1, depth2)
265
-
})
315
+
const key = 'app.bsky.feed.post/test123';
316
+
const depth1 = await getKeyDepth(key);
317
+
const depth2 = await getKeyDepth(key);
318
+
assert.strictEqual(depth1, depth2);
319
+
});
266
320
267
321
test('different keys can have different depths', async () => {
268
322
// Generate many keys and check we get some variation
269
-
const depths = new Set()
323
+
const depths = new Set();
270
324
for (let i = 0; i < 100; i++) {
271
-
depths.add(await getKeyDepth(`collection/key${i}`))
325
+
depths.add(await getKeyDepth(`collection/key${i}`));
272
326
}
273
327
// Should have at least 1 unique depth (realistically more)
274
-
assert.ok(depths.size >= 1)
275
-
})
328
+
assert.ok(depths.size >= 1);
329
+
});
276
330
277
331
test('handles empty string', async () => {
278
-
const depth = await getKeyDepth('')
279
-
assert.strictEqual(typeof depth, 'number')
280
-
assert.ok(depth >= 0)
281
-
})
332
+
const depth = await getKeyDepth('');
333
+
assert.strictEqual(typeof depth, 'number');
334
+
assert.ok(depth >= 0);
335
+
});
282
336
283
337
test('handles unicode strings', async () => {
284
-
const depth = await getKeyDepth('app.bsky.feed.post/รฉmoji๐')
285
-
assert.strictEqual(typeof depth, 'number')
286
-
assert.ok(depth >= 0)
287
-
})
288
-
})
338
+
const depth = await getKeyDepth('app.bsky.feed.post/รฉmoji๐');
339
+
assert.strictEqual(typeof depth, 'number');
340
+
assert.ok(depth >= 0);
341
+
});
342
+
});
289
343
290
344
describe('CBOR Decoding', () => {
291
345
test('decodes what encode produces (roundtrip)', () => {
292
-
const original = { hello: 'world', num: 42 }
293
-
const encoded = cborEncode(original)
294
-
const decoded = cborDecode(encoded)
295
-
assert.deepStrictEqual(decoded, original)
296
-
})
346
+
const original = { hello: 'world', num: 42 };
347
+
const encoded = cborEncode(original);
348
+
const decoded = cborDecode(encoded);
349
+
assert.deepStrictEqual(decoded, original);
350
+
});
297
351
298
352
test('decodes null', () => {
299
-
const encoded = cborEncode(null)
300
-
const decoded = cborDecode(encoded)
301
-
assert.strictEqual(decoded, null)
302
-
})
353
+
const encoded = cborEncode(null);
354
+
const decoded = cborDecode(encoded);
355
+
assert.strictEqual(decoded, null);
356
+
});
303
357
304
358
test('decodes booleans', () => {
305
-
assert.strictEqual(cborDecode(cborEncode(true)), true)
306
-
assert.strictEqual(cborDecode(cborEncode(false)), false)
307
-
})
359
+
assert.strictEqual(cborDecode(cborEncode(true)), true);
360
+
assert.strictEqual(cborDecode(cborEncode(false)), false);
361
+
});
308
362
309
363
test('decodes integers', () => {
310
-
assert.strictEqual(cborDecode(cborEncode(0)), 0)
311
-
assert.strictEqual(cborDecode(cborEncode(42)), 42)
312
-
assert.strictEqual(cborDecode(cborEncode(255)), 255)
313
-
assert.strictEqual(cborDecode(cborEncode(-1)), -1)
314
-
assert.strictEqual(cborDecode(cborEncode(-10)), -10)
315
-
})
364
+
assert.strictEqual(cborDecode(cborEncode(0)), 0);
365
+
assert.strictEqual(cborDecode(cborEncode(42)), 42);
366
+
assert.strictEqual(cborDecode(cborEncode(255)), 255);
367
+
assert.strictEqual(cborDecode(cborEncode(-1)), -1);
368
+
assert.strictEqual(cborDecode(cborEncode(-10)), -10);
369
+
});
316
370
317
371
test('decodes strings', () => {
318
-
assert.strictEqual(cborDecode(cborEncode('hello')), 'hello')
319
-
assert.strictEqual(cborDecode(cborEncode('')), '')
320
-
})
372
+
assert.strictEqual(cborDecode(cborEncode('hello')), 'hello');
373
+
assert.strictEqual(cborDecode(cborEncode('')), '');
374
+
});
321
375
322
376
test('decodes arrays', () => {
323
-
assert.deepStrictEqual(cborDecode(cborEncode([1, 2, 3])), [1, 2, 3])
324
-
assert.deepStrictEqual(cborDecode(cborEncode([])), [])
325
-
})
377
+
assert.deepStrictEqual(cborDecode(cborEncode([1, 2, 3])), [1, 2, 3]);
378
+
assert.deepStrictEqual(cborDecode(cborEncode([])), []);
379
+
});
326
380
327
381
test('decodes nested structures', () => {
328
-
const original = { arr: [1, { nested: true }], str: 'test' }
329
-
const decoded = cborDecode(cborEncode(original))
330
-
assert.deepStrictEqual(decoded, original)
331
-
})
332
-
})
382
+
const original = { arr: [1, { nested: true }], str: 'test' };
383
+
const decoded = cborDecode(cborEncode(original));
384
+
assert.deepStrictEqual(decoded, original);
385
+
});
386
+
});
333
387
334
388
describe('CAR File Builder', () => {
335
389
test('varint encodes small numbers', () => {
336
-
assert.deepStrictEqual(varint(0), new Uint8Array([0]))
337
-
assert.deepStrictEqual(varint(1), new Uint8Array([1]))
338
-
assert.deepStrictEqual(varint(127), new Uint8Array([127]))
339
-
})
390
+
assert.deepStrictEqual(varint(0), new Uint8Array([0]));
391
+
assert.deepStrictEqual(varint(1), new Uint8Array([1]));
392
+
assert.deepStrictEqual(varint(127), new Uint8Array([127]));
393
+
});
340
394
341
395
test('varint encodes multi-byte numbers', () => {
342
396
// 128 = 0x80 -> [0x80 | 0x00, 0x01] = [0x80, 0x01]
343
-
assert.deepStrictEqual(varint(128), new Uint8Array([0x80, 0x01]))
397
+
assert.deepStrictEqual(varint(128), new Uint8Array([0x80, 0x01]));
344
398
// 300 = 0x12c -> [0xac, 0x02]
345
-
assert.deepStrictEqual(varint(300), new Uint8Array([0xac, 0x02]))
346
-
})
399
+
assert.deepStrictEqual(varint(300), new Uint8Array([0xac, 0x02]));
400
+
});
347
401
348
402
test('base32 encode/decode roundtrip', () => {
349
-
const original = new Uint8Array([0x01, 0x71, 0x12, 0x20, 0xab, 0xcd])
350
-
const encoded = base32Encode(original)
351
-
const decoded = base32Decode(encoded)
352
-
assert.deepStrictEqual(decoded, original)
353
-
})
403
+
const original = new Uint8Array([0x01, 0x71, 0x12, 0x20, 0xab, 0xcd]);
404
+
const encoded = base32Encode(original);
405
+
const decoded = base32Decode(encoded);
406
+
assert.deepStrictEqual(decoded, original);
407
+
});
354
408
355
409
test('buildCarFile produces valid structure', async () => {
356
-
const data = cborEncode({ test: 'data' })
357
-
const cid = await createCid(data)
358
-
const cidStr = cidToString(cid)
410
+
const data = cborEncode({ test: 'data' });
411
+
const cid = await createCid(data);
412
+
const cidStr = cidToString(cid);
359
413
360
-
const car = buildCarFile(cidStr, [{ cid: cidStr, data }])
414
+
const car = buildCarFile(cidStr, [{ cid: cidStr, data }]);
361
415
362
-
assert.ok(car instanceof Uint8Array)
363
-
assert.ok(car.length > 0)
416
+
assert.ok(car instanceof Uint8Array);
417
+
assert.ok(car.length > 0);
364
418
// First byte should be varint of header length
365
-
assert.ok(car[0] > 0)
366
-
})
367
-
})
419
+
assert.ok(car[0] > 0);
420
+
});
421
+
});
422
+
423
+
describe('JWT Base64URL', () => {
424
+
test('base64UrlEncode encodes bytes correctly', () => {
425
+
const input = new TextEncoder().encode('hello world');
426
+
const encoded = base64UrlEncode(input);
427
+
assert.strictEqual(encoded, 'aGVsbG8gd29ybGQ');
428
+
assert.ok(!encoded.includes('+'));
429
+
assert.ok(!encoded.includes('/'));
430
+
assert.ok(!encoded.includes('='));
431
+
});
432
+
433
+
test('base64UrlDecode decodes string correctly', () => {
434
+
const decoded = base64UrlDecode('aGVsbG8gd29ybGQ');
435
+
const str = new TextDecoder().decode(decoded);
436
+
assert.strictEqual(str, 'hello world');
437
+
});
438
+
439
+
test('base64url roundtrip', () => {
440
+
const original = new Uint8Array([0, 1, 2, 255, 254, 253]);
441
+
const encoded = base64UrlEncode(original);
442
+
const decoded = base64UrlDecode(encoded);
443
+
assert.deepStrictEqual(decoded, original);
444
+
});
445
+
});
446
+
447
+
describe('JWT Creation', () => {
448
+
test('createAccessJwt creates valid JWT structure', async () => {
449
+
const did = 'did:web:test.example';
450
+
const secret = 'test-secret-key';
451
+
const jwt = await createAccessJwt(did, secret);
452
+
453
+
const parts = jwt.split('.');
454
+
assert.strictEqual(parts.length, 3);
455
+
456
+
// Decode header
457
+
const header = JSON.parse(
458
+
new TextDecoder().decode(base64UrlDecode(parts[0])),
459
+
);
460
+
assert.strictEqual(header.typ, 'at+jwt');
461
+
assert.strictEqual(header.alg, 'HS256');
462
+
463
+
// Decode payload
464
+
const payload = JSON.parse(
465
+
new TextDecoder().decode(base64UrlDecode(parts[1])),
466
+
);
467
+
assert.strictEqual(payload.scope, 'com.atproto.access');
468
+
assert.strictEqual(payload.sub, did);
469
+
assert.strictEqual(payload.aud, did);
470
+
assert.ok(payload.iat > 0);
471
+
assert.ok(payload.exp > payload.iat);
472
+
});
473
+
474
+
test('createRefreshJwt creates valid JWT with jti', async () => {
475
+
const did = 'did:web:test.example';
476
+
const secret = 'test-secret-key';
477
+
const jwt = await createRefreshJwt(did, secret);
478
+
479
+
const parts = jwt.split('.');
480
+
const header = JSON.parse(
481
+
new TextDecoder().decode(base64UrlDecode(parts[0])),
482
+
);
483
+
assert.strictEqual(header.typ, 'refresh+jwt');
484
+
485
+
const payload = JSON.parse(
486
+
new TextDecoder().decode(base64UrlDecode(parts[1])),
487
+
);
488
+
assert.strictEqual(payload.scope, 'com.atproto.refresh');
489
+
assert.ok(payload.jti); // has unique token ID
490
+
});
491
+
});
492
+
493
+
describe('JWT Verification', () => {
494
+
test('verifyAccessJwt returns payload for valid token', async () => {
495
+
const did = 'did:web:test.example';
496
+
const secret = 'test-secret-key';
497
+
const jwt = await createAccessJwt(did, secret);
498
+
499
+
const payload = await verifyAccessJwt(jwt, secret);
500
+
assert.strictEqual(payload.sub, did);
501
+
assert.strictEqual(payload.scope, 'com.atproto.access');
502
+
});
503
+
504
+
test('verifyAccessJwt throws for wrong secret', async () => {
505
+
const did = 'did:web:test.example';
506
+
const jwt = await createAccessJwt(did, 'correct-secret');
507
+
508
+
await assert.rejects(
509
+
() => verifyAccessJwt(jwt, 'wrong-secret'),
510
+
/invalid signature/i,
511
+
);
512
+
});
513
+
514
+
test('verifyAccessJwt throws for expired token', async () => {
515
+
const did = 'did:web:test.example';
516
+
const secret = 'test-secret-key';
517
+
// Create token that expired 1 second ago
518
+
const jwt = await createAccessJwt(did, secret, -1);
519
+
520
+
await assert.rejects(() => verifyAccessJwt(jwt, secret), /expired/i);
521
+
});
522
+
523
+
test('verifyAccessJwt throws for refresh token', async () => {
524
+
const did = 'did:web:test.example';
525
+
const secret = 'test-secret-key';
526
+
const jwt = await createRefreshJwt(did, secret);
527
+
528
+
await assert.rejects(
529
+
() => verifyAccessJwt(jwt, secret),
530
+
/invalid token type/i,
531
+
);
532
+
});
533
+
534
+
test('verifyRefreshJwt returns payload for valid token', async () => {
535
+
const did = 'did:web:test.example';
536
+
const secret = 'test-secret-key';
537
+
const jwt = await createRefreshJwt(did, secret);
538
+
539
+
const payload = await verifyRefreshJwt(jwt, secret);
540
+
assert.strictEqual(payload.sub, did);
541
+
assert.strictEqual(payload.scope, 'com.atproto.refresh');
542
+
assert.ok(payload.jti); // has token ID
543
+
});
544
+
545
+
test('verifyRefreshJwt throws for wrong secret', async () => {
546
+
const did = 'did:web:test.example';
547
+
const jwt = await createRefreshJwt(did, 'correct-secret');
548
+
549
+
await assert.rejects(
550
+
() => verifyRefreshJwt(jwt, 'wrong-secret'),
551
+
/invalid signature/i,
552
+
);
553
+
});
554
+
555
+
test('verifyRefreshJwt throws for expired token', async () => {
556
+
const did = 'did:web:test.example';
557
+
const secret = 'test-secret-key';
558
+
// Create token that expired 1 second ago
559
+
const jwt = await createRefreshJwt(did, secret, -1);
560
+
561
+
await assert.rejects(() => verifyRefreshJwt(jwt, secret), /expired/i);
562
+
});
563
+
564
+
test('verifyRefreshJwt throws for access token', async () => {
565
+
const did = 'did:web:test.example';
566
+
const secret = 'test-secret-key';
567
+
const jwt = await createAccessJwt(did, secret);
568
+
569
+
await assert.rejects(
570
+
() => verifyRefreshJwt(jwt, secret),
571
+
/invalid token type/i,
572
+
);
573
+
});
574
+
575
+
test('verifyAccessJwt throws for malformed JWT', async () => {
576
+
const secret = 'test-secret-key';
577
+
578
+
// Not a JWT at all
579
+
await assert.rejects(
580
+
() => verifyAccessJwt('not-a-jwt', secret),
581
+
/Invalid JWT format/i,
582
+
);
583
+
584
+
// Only two parts
585
+
await assert.rejects(
586
+
() => verifyAccessJwt('two.parts', secret),
587
+
/Invalid JWT format/i,
588
+
);
589
+
590
+
// Four parts
591
+
await assert.rejects(
592
+
() => verifyAccessJwt('one.two.three.four', secret),
593
+
/Invalid JWT format/i,
594
+
);
595
+
});
596
+
597
+
test('verifyRefreshJwt throws for malformed JWT', async () => {
598
+
const secret = 'test-secret-key';
599
+
600
+
await assert.rejects(
601
+
() => verifyRefreshJwt('not-a-jwt', secret),
602
+
/Invalid JWT format/i,
603
+
);
604
+
605
+
await assert.rejects(
606
+
() => verifyRefreshJwt('two.parts', secret),
607
+
/Invalid JWT format/i,
608
+
);
609
+
});
610
+
});
611
+
612
+
describe('MIME Type Sniffing', () => {
613
+
test('detects JPEG', () => {
614
+
const bytes = new Uint8Array([0xff, 0xd8, 0xff, 0xe0, 0x00, 0x10]);
615
+
assert.strictEqual(sniffMimeType(bytes), 'image/jpeg');
616
+
});
617
+
618
+
test('detects PNG', () => {
619
+
const bytes = new Uint8Array([
620
+
0x89, 0x50, 0x4e, 0x47, 0x0d, 0x0a, 0x1a, 0x0a,
621
+
]);
622
+
assert.strictEqual(sniffMimeType(bytes), 'image/png');
623
+
});
624
+
625
+
test('detects GIF', () => {
626
+
const bytes = new Uint8Array([0x47, 0x49, 0x46, 0x38, 0x39, 0x61]);
627
+
assert.strictEqual(sniffMimeType(bytes), 'image/gif');
628
+
});
629
+
630
+
test('detects WebP', () => {
631
+
const bytes = new Uint8Array([
632
+
0x52,
633
+
0x49,
634
+
0x46,
635
+
0x46, // RIFF
636
+
0x00,
637
+
0x00,
638
+
0x00,
639
+
0x00, // size (ignored)
640
+
0x57,
641
+
0x45,
642
+
0x42,
643
+
0x50, // WEBP
644
+
]);
645
+
assert.strictEqual(sniffMimeType(bytes), 'image/webp');
646
+
});
647
+
648
+
test('detects MP4', () => {
649
+
const bytes = new Uint8Array([
650
+
0x00,
651
+
0x00,
652
+
0x00,
653
+
0x18, // size
654
+
0x66,
655
+
0x74,
656
+
0x79,
657
+
0x70, // ftyp
658
+
0x69,
659
+
0x73,
660
+
0x6f,
661
+
0x6d, // isom brand
662
+
]);
663
+
assert.strictEqual(sniffMimeType(bytes), 'video/mp4');
664
+
});
665
+
666
+
test('detects AVIF', () => {
667
+
const bytes = new Uint8Array([
668
+
0x00,
669
+
0x00,
670
+
0x00,
671
+
0x1c, // size
672
+
0x66,
673
+
0x74,
674
+
0x79,
675
+
0x70, // ftyp
676
+
0x61,
677
+
0x76,
678
+
0x69,
679
+
0x66, // avif brand
680
+
]);
681
+
assert.strictEqual(sniffMimeType(bytes), 'image/avif');
682
+
});
683
+
684
+
test('detects HEIC', () => {
685
+
const bytes = new Uint8Array([
686
+
0x00,
687
+
0x00,
688
+
0x00,
689
+
0x18, // size
690
+
0x66,
691
+
0x74,
692
+
0x79,
693
+
0x70, // ftyp
694
+
0x68,
695
+
0x65,
696
+
0x69,
697
+
0x63, // heic brand
698
+
]);
699
+
assert.strictEqual(sniffMimeType(bytes), 'image/heic');
700
+
});
701
+
702
+
test('returns null for unknown', () => {
703
+
const bytes = new Uint8Array([0x00, 0x01, 0x02, 0x03]);
704
+
assert.strictEqual(sniffMimeType(bytes), null);
705
+
});
706
+
});
707
+
708
+
describe('Blob Ref Detection', () => {
709
+
test('finds blob ref in simple object', () => {
710
+
const record = {
711
+
$type: 'app.bsky.feed.post',
712
+
text: 'Hello',
713
+
embed: {
714
+
$type: 'app.bsky.embed.images',
715
+
images: [
716
+
{
717
+
image: {
718
+
$type: 'blob',
719
+
ref: { $link: 'bafkreiabc123' },
720
+
mimeType: 'image/jpeg',
721
+
size: 1234,
722
+
},
723
+
alt: 'test image',
724
+
},
725
+
],
726
+
},
727
+
};
728
+
const refs = findBlobRefs(record);
729
+
assert.deepStrictEqual(refs, ['bafkreiabc123']);
730
+
});
731
+
732
+
test('finds multiple blob refs', () => {
733
+
const record = {
734
+
images: [
735
+
{
736
+
image: {
737
+
$type: 'blob',
738
+
ref: { $link: 'cid1' },
739
+
mimeType: 'image/png',
740
+
size: 100,
741
+
},
742
+
},
743
+
{
744
+
image: {
745
+
$type: 'blob',
746
+
ref: { $link: 'cid2' },
747
+
mimeType: 'image/png',
748
+
size: 200,
749
+
},
750
+
},
751
+
],
752
+
};
753
+
const refs = findBlobRefs(record);
754
+
assert.deepStrictEqual(refs, ['cid1', 'cid2']);
755
+
});
756
+
757
+
test('returns empty array when no blobs', () => {
758
+
const record = { text: 'Hello world', count: 42 };
759
+
const refs = findBlobRefs(record);
760
+
assert.deepStrictEqual(refs, []);
761
+
});
762
+
763
+
test('handles null and primitives', () => {
764
+
assert.deepStrictEqual(findBlobRefs(null), []);
765
+
assert.deepStrictEqual(findBlobRefs('string'), []);
766
+
assert.deepStrictEqual(findBlobRefs(42), []);
767
+
});
768
+
});
769
+
770
+
describe('JWK Thumbprint', () => {
771
+
test('computes deterministic thumbprint for EC key', async () => {
772
+
// Test vector: known JWK and its expected thumbprint
773
+
const jwk = {
774
+
kty: 'EC',
775
+
crv: 'P-256',
776
+
x: 'WbbCfHGZ9QtKsVuMdPZ8hBbP2949N_CSLG3LVV0nnKY',
777
+
y: 'eSgPlDj0RVMw8t8u4MvCYG4j_JfDwvrMUUwEEHVLmqQ',
778
+
};
779
+
780
+
const jkt1 = await computeJwkThumbprint(jwk);
781
+
const jkt2 = await computeJwkThumbprint(jwk);
782
+
783
+
// Thumbprint must be deterministic
784
+
assert.strictEqual(jkt1, jkt2);
785
+
// Must be base64url-encoded SHA-256 (43 chars)
786
+
assert.strictEqual(jkt1.length, 43);
787
+
// Must only contain base64url characters
788
+
assert.match(jkt1, /^[A-Za-z0-9_-]+$/);
789
+
});
790
+
791
+
test('produces different thumbprints for different keys', async () => {
792
+
const jwk1 = {
793
+
kty: 'EC',
794
+
crv: 'P-256',
795
+
x: 'WbbCfHGZ9QtKsVuMdPZ8hBbP2949N_CSLG3LVV0nnKY',
796
+
y: 'eSgPlDj0RVMw8t8u4MvCYG4j_JfDwvrMUUwEEHVLmqQ',
797
+
};
798
+
const jwk2 = {
799
+
kty: 'EC',
800
+
crv: 'P-256',
801
+
x: 'f83OJ3D2xF1Bg8vub9tLe1gHMzV76e8Tus9uPHvRVEU',
802
+
y: 'x_FEzRu9m36HLN_tue659LNpXW6pCyStikYjKIWI5a0',
803
+
};
804
+
805
+
const jkt1 = await computeJwkThumbprint(jwk1);
806
+
const jkt2 = await computeJwkThumbprint(jwk2);
807
+
808
+
assert.notStrictEqual(jkt1, jkt2);
809
+
});
810
+
});
811
+
812
+
describe('Client Metadata', () => {
813
+
test('isLoopbackClient detects localhost', () => {
814
+
assert.strictEqual(isLoopbackClient('http://localhost:8080'), true);
815
+
assert.strictEqual(isLoopbackClient('http://127.0.0.1:3000'), true);
816
+
assert.strictEqual(isLoopbackClient('https://example.com'), false);
817
+
});
818
+
819
+
test('getLoopbackClientMetadata returns permissive defaults', () => {
820
+
const metadata = getLoopbackClientMetadata('http://localhost:8080');
821
+
assert.strictEqual(metadata.client_id, 'http://localhost:8080');
822
+
assert.ok(metadata.grant_types.includes('authorization_code'));
823
+
assert.strictEqual(metadata.dpop_bound_access_tokens, true);
824
+
});
825
+
826
+
test('validateClientMetadata rejects mismatched client_id', () => {
827
+
const metadata = {
828
+
client_id: 'https://other.com/metadata.json',
829
+
redirect_uris: ['https://example.com/callback'],
830
+
grant_types: ['authorization_code'],
831
+
response_types: ['code'],
832
+
};
833
+
assert.throws(
834
+
() =>
835
+
validateClientMetadata(metadata, 'https://example.com/metadata.json'),
836
+
/client_id mismatch/,
837
+
);
838
+
});
839
+
});
840
+
841
+
describe('Proxy Utilities', () => {
842
+
describe('parseAtprotoProxyHeader', () => {
843
+
test('parses valid header', () => {
844
+
const result = parseAtprotoProxyHeader(
845
+
'did:web:api.bsky.app#bsky_appview',
846
+
);
847
+
assert.deepStrictEqual(result, {
848
+
did: 'did:web:api.bsky.app',
849
+
serviceId: 'bsky_appview',
850
+
});
851
+
});
852
+
853
+
test('parses header with did:plc', () => {
854
+
const result = parseAtprotoProxyHeader(
855
+
'did:plc:z72i7hdynmk6r22z27h6tvur#atproto_labeler',
856
+
);
857
+
assert.deepStrictEqual(result, {
858
+
did: 'did:plc:z72i7hdynmk6r22z27h6tvur',
859
+
serviceId: 'atproto_labeler',
860
+
});
861
+
});
862
+
863
+
test('returns null for null/undefined', () => {
864
+
assert.strictEqual(parseAtprotoProxyHeader(null), null);
865
+
assert.strictEqual(parseAtprotoProxyHeader(undefined), null);
866
+
assert.strictEqual(parseAtprotoProxyHeader(''), null);
867
+
});
868
+
869
+
test('returns null for header without fragment', () => {
870
+
assert.strictEqual(parseAtprotoProxyHeader('did:web:api.bsky.app'), null);
871
+
});
872
+
873
+
test('returns null for header with only fragment', () => {
874
+
assert.strictEqual(parseAtprotoProxyHeader('#bsky_appview'), null);
875
+
});
876
+
877
+
test('returns null for header with trailing fragment', () => {
878
+
assert.strictEqual(
879
+
parseAtprotoProxyHeader('did:web:api.bsky.app#'),
880
+
null,
881
+
);
882
+
});
883
+
});
884
+
885
+
describe('getKnownServiceUrl', () => {
886
+
test('returns URL for known Bluesky AppView', () => {
887
+
const result = getKnownServiceUrl('did:web:api.bsky.app', 'bsky_appview');
888
+
assert.strictEqual(result, BSKY_APPVIEW_URL);
889
+
});
890
+
891
+
test('returns null for unknown service DID', () => {
892
+
const result = getKnownServiceUrl(
893
+
'did:web:unknown.service',
894
+
'bsky_appview',
895
+
);
896
+
assert.strictEqual(result, null);
897
+
});
898
+
899
+
test('returns null for unknown service ID', () => {
900
+
const result = getKnownServiceUrl(
901
+
'did:web:api.bsky.app',
902
+
'unknown_service',
903
+
);
904
+
assert.strictEqual(result, null);
905
+
});
906
+
907
+
test('returns null for both unknown', () => {
908
+
const result = getKnownServiceUrl('did:web:unknown', 'unknown');
909
+
assert.strictEqual(result, null);
910
+
});
911
+
});
912
+
});
913
+
914
+
describe('Scope Parsing', () => {
915
+
describe('parseRepoScope', () => {
916
+
test('parses repo scope with query parameter action', () => {
917
+
const result = parseRepoScope('repo:app.bsky.feed.post?action=create');
918
+
assert.deepStrictEqual(result, {
919
+
collection: 'app.bsky.feed.post',
920
+
actions: ['create'],
921
+
});
922
+
});
923
+
924
+
test('parses repo scope with multiple query parameter actions', () => {
925
+
const result = parseRepoScope(
926
+
'repo:app.bsky.feed.post?action=create&action=update',
927
+
);
928
+
assert.deepStrictEqual(result, {
929
+
collection: 'app.bsky.feed.post',
930
+
actions: ['create', 'update'],
931
+
});
932
+
});
933
+
934
+
test('parses repo scope without actions as all actions', () => {
935
+
const result = parseRepoScope('repo:app.bsky.feed.post');
936
+
assert.deepStrictEqual(result, {
937
+
collection: 'app.bsky.feed.post',
938
+
actions: ['create', 'update', 'delete'],
939
+
});
940
+
});
941
+
942
+
test('parses wildcard collection with action', () => {
943
+
const result = parseRepoScope('repo:*?action=create');
944
+
assert.deepStrictEqual(result, {
945
+
collection: '*',
946
+
actions: ['create'],
947
+
});
948
+
});
949
+
950
+
test('parses query-only format', () => {
951
+
const result = parseRepoScope(
952
+
'repo?collection=app.bsky.feed.post&action=create',
953
+
);
954
+
assert.deepStrictEqual(result, {
955
+
collection: 'app.bsky.feed.post',
956
+
actions: ['create'],
957
+
});
958
+
});
959
+
960
+
test('deduplicates repeated actions', () => {
961
+
const result = parseRepoScope(
962
+
'repo:app.bsky.feed.post?action=create&action=create&action=update',
963
+
);
964
+
assert.deepStrictEqual(result, {
965
+
collection: 'app.bsky.feed.post',
966
+
actions: ['create', 'update'],
967
+
});
968
+
});
969
+
970
+
test('returns null for non-repo scope', () => {
971
+
assert.strictEqual(parseRepoScope('atproto'), null);
972
+
assert.strictEqual(parseRepoScope('blob:image/*'), null);
973
+
assert.strictEqual(parseRepoScope('transition:generic'), null);
974
+
});
975
+
976
+
test('returns null for invalid repo scope', () => {
977
+
assert.strictEqual(parseRepoScope('repo:'), null);
978
+
assert.strictEqual(parseRepoScope('repo?'), null);
979
+
});
980
+
});
981
+
982
+
describe('parseBlobScope', () => {
  // Table of positive cases: [test name, scope string, expected accept list].
  const acceptCases = [
    ['parses wildcard MIME', 'blob:*/*', ['*/*']],
    ['parses type wildcard', 'blob:image/*', ['image/*']],
    ['parses specific MIME', 'blob:image/png', ['image/png']],
    ['parses multiple MIMEs', 'blob:image/png,image/jpeg', ['image/png', 'image/jpeg']],
  ];
  for (const [name, scope, accept] of acceptCases) {
    test(name, () => {
      assert.deepStrictEqual(parseBlobScope(scope), { accept });
    });
  }

  test('returns null for non-blob scope', () => {
    assert.strictEqual(parseBlobScope('atproto'), null);
    assert.strictEqual(parseBlobScope('repo:*:create'), null);
  });
});
1008
+
1009
+
describe('matchesMime', () => {
  // Shorthand assertions on the boolean result of matchesMime(pattern, mime).
  const expectMatch = (pattern, mime) =>
    assert.strictEqual(matchesMime(pattern, mime), true);
  const expectNoMatch = (pattern, mime) =>
    assert.strictEqual(matchesMime(pattern, mime), false);

  test('wildcard matches everything', () => {
    expectMatch('*/*', 'image/png');
    expectMatch('*/*', 'video/mp4');
  });

  test('type wildcard matches same type', () => {
    expectMatch('image/*', 'image/png');
    expectMatch('image/*', 'image/jpeg');
    expectNoMatch('image/*', 'video/mp4');
  });

  test('exact match', () => {
    expectMatch('image/png', 'image/png');
    expectNoMatch('image/png', 'image/jpeg');
  });

  test('case insensitive', () => {
    expectMatch('image/PNG', 'image/png');
    expectMatch('IMAGE/*', 'image/png');
  });
});
1031
+
});
1032
+
1033
+
describe('ScopePermissions', () => {
  // Shared expectation helpers over a ScopePermissions instance.
  const repoAllowed = (perms, collection, action, expected) =>
    assert.strictEqual(perms.allowsRepo(collection, action), expected);
  const blobAllowed = (perms, mime, expected) =>
    assert.strictEqual(perms.allowsBlob(mime), expected);

  describe('static scopes', () => {
    test('atproto grants full access', () => {
      const perms = new ScopePermissions('atproto');
      repoAllowed(perms, 'app.bsky.feed.post', 'create', true);
      repoAllowed(perms, 'any.collection', 'delete', true);
      blobAllowed(perms, 'image/png', true);
      blobAllowed(perms, 'video/mp4', true);
    });

    test('transition:generic grants full repo/blob access', () => {
      const perms = new ScopePermissions('transition:generic');
      repoAllowed(perms, 'app.bsky.feed.post', 'create', true);
      repoAllowed(perms, 'any.collection', 'delete', true);
      blobAllowed(perms, 'image/png', true);
    });
  });

  describe('repo scopes', () => {
    test('wildcard collection allows any collection', () => {
      const perms = new ScopePermissions('repo:*?action=create');
      repoAllowed(perms, 'app.bsky.feed.post', 'create', true);
      repoAllowed(perms, 'app.bsky.feed.like', 'create', true);
      // Only the granted action is permitted, even with a wildcard collection.
      repoAllowed(perms, 'app.bsky.feed.post', 'delete', false);
    });

    test('specific collection restricts to that collection', () => {
      const perms = new ScopePermissions('repo:app.bsky.feed.post?action=create');
      repoAllowed(perms, 'app.bsky.feed.post', 'create', true);
      repoAllowed(perms, 'app.bsky.feed.like', 'create', false);
    });

    test('multiple actions', () => {
      const perms = new ScopePermissions('repo:*?action=create&action=update');
      repoAllowed(perms, 'x', 'create', true);
      repoAllowed(perms, 'x', 'update', true);
      repoAllowed(perms, 'x', 'delete', false);
    });

    test('multiple scopes combine', () => {
      const perms = new ScopePermissions(
        'repo:app.bsky.feed.post?action=create repo:app.bsky.feed.like?action=delete',
      );
      repoAllowed(perms, 'app.bsky.feed.post', 'create', true);
      repoAllowed(perms, 'app.bsky.feed.like', 'delete', true);
      // Grants are per-collection; delete was only granted for likes.
      repoAllowed(perms, 'app.bsky.feed.post', 'delete', false);
    });

    test('allowsRepo with query param format scopes', () => {
      const perms = new ScopePermissions(
        'atproto repo:app.bsky.feed.post?action=create',
      );
      repoAllowed(perms, 'app.bsky.feed.post', 'create', true);
      // atproto grants full access, so delete passes despite the narrow repo scope.
      repoAllowed(perms, 'app.bsky.feed.post', 'delete', true);
    });
  });

  describe('blob scopes', () => {
    test('wildcard allows any MIME', () => {
      const perms = new ScopePermissions('blob:*/*');
      blobAllowed(perms, 'image/png', true);
      blobAllowed(perms, 'video/mp4', true);
    });

    test('type wildcard restricts to type', () => {
      const perms = new ScopePermissions('blob:image/*');
      blobAllowed(perms, 'image/png', true);
      blobAllowed(perms, 'image/jpeg', true);
      blobAllowed(perms, 'video/mp4', false);
    });

    test('specific MIME restricts exactly', () => {
      const perms = new ScopePermissions('blob:image/png');
      blobAllowed(perms, 'image/png', true);
      blobAllowed(perms, 'image/jpeg', false);
    });
  });

  describe('empty/no scope', () => {
    test('no scope denies everything', () => {
      const perms = new ScopePermissions('');
      repoAllowed(perms, 'x', 'create', false);
      blobAllowed(perms, 'image/png', false);
    });

    test('undefined scope denies everything', () => {
      const perms = new ScopePermissions(undefined);
      repoAllowed(perms, 'x', 'create', false);
    });
  });

  describe('assertRepo', () => {
    test('throws ScopeMissingError when denied', () => {
      const perms = new ScopePermissions('repo:app.bsky.feed.post?action=create');
      assert.throws(() => perms.assertRepo('app.bsky.feed.like', 'create'), {
        message: /Missing required scope/,
      });
    });

    test('does not throw when allowed', () => {
      const perms = new ScopePermissions('repo:app.bsky.feed.post?action=create');
      assert.doesNotThrow(() => perms.assertRepo('app.bsky.feed.post', 'create'));
    });
  });

  describe('assertBlob', () => {
    test('throws ScopeMissingError when denied', () => {
      const perms = new ScopePermissions('blob:image/*');
      assert.throws(() => perms.assertBlob('video/mp4'), {
        message: /Missing required scope/,
      });
    });

    test('does not throw when allowed', () => {
      const perms = new ScopePermissions('blob:image/*');
      assert.doesNotThrow(() => perms.assertBlob('image/png'));
    });
  });
});
1196
+
1197
+
describe('parseScopesForDisplay', () => {
  test('parses identity-only scope', () => {
    const { hasAtproto, hasTransitionGeneric, repoPermissions, blobPermissions } =
      parseScopesForDisplay('atproto');
    assert.strictEqual(hasAtproto, true);
    assert.strictEqual(hasTransitionGeneric, false);
    assert.strictEqual(repoPermissions.size, 0);
    assert.deepStrictEqual(blobPermissions, []);
  });

  test('parses granular repo scopes', () => {
    const display = parseScopesForDisplay(
      'atproto repo:app.bsky.feed.post?action=create&action=update',
    );
    assert.strictEqual(display.repoPermissions.size, 1);
    // Ungranted actions are reported explicitly as false for display.
    assert.deepStrictEqual(display.repoPermissions.get('app.bsky.feed.post'), {
      create: true,
      update: true,
      delete: false,
    });
  });

  test('merges multiple scopes for same collection', () => {
    const display = parseScopesForDisplay(
      'atproto repo:app.bsky.feed.post?action=create repo:app.bsky.feed.post?action=delete',
    );
    // Two scopes on the same collection union their actions.
    assert.deepStrictEqual(display.repoPermissions.get('app.bsky.feed.post'), {
      create: true,
      update: false,
      delete: true,
    });
  });

  test('parses blob scopes', () => {
    const display = parseScopesForDisplay('atproto blob:image/*');
    assert.deepStrictEqual(display.blobPermissions, ['image/*']);
  });

  test('detects transition:generic', () => {
    const display = parseScopesForDisplay('atproto transition:generic');
    assert.strictEqual(display.hasTransitionGeneric, true);
  });

  test('handles empty scope string', () => {
    const display = parseScopesForDisplay('');
    assert.strictEqual(display.hasAtproto, false);
    assert.strictEqual(display.hasTransitionGeneric, false);
    assert.strictEqual(display.repoPermissions.size, 0);
    assert.deepStrictEqual(display.blobPermissions, []);
  });
});
+16
tsconfig.json
+16
tsconfig.json
···
1
+
{
2
+
"compilerOptions": {
3
+
"target": "ES2022",
4
+
"module": "ES2022",
5
+
"moduleResolution": "bundler",
6
+
"checkJs": true,
7
+
"allowJs": true,
8
+
"noEmit": true,
9
+
"strict": true,
10
+
"skipLibCheck": true,
11
+
"useUnknownInCatchVariables": false,
12
+
"types": ["@cloudflare/workers-types"]
13
+
},
14
+
"include": ["src/**/*.js"],
15
+
"exclude": ["node_modules"]
16
+
}