This PR introduces a readonly package for read-only PDS functionality and tooling.
+2
packages/core/package.json
+2
packages/core/package.json
+217
packages/core/src/car.js
+217
packages/core/src/car.js
···
1
+
// @pds/core/car - CAR (Content Addressable aRchive) file parser
2
+
// Parses CAR v1 files for importing AT Protocol repositories
3
+
4
+
import { cborDecode, cidToString } from './repo.js';
5
+
6
+
/**
 * Read an unsigned LEB128 varint from bytes at the given offset.
 *
 * Accumulates with arithmetic rather than 32-bit bitwise ops: the previous
 * `value |= byte << shift` form truncated results to a signed 32-bit integer
 * and wrapped the shift count mod 32 (`<< 35` shifts by 3), corrupting any
 * varint >= 2^31. Values are now correct up to Number.MAX_SAFE_INTEGER.
 *
 * @param {Uint8Array} bytes - Input bytes
 * @param {number} offset - Starting offset
 * @returns {[number, number]} - [decoded value, new offset after varint]
 * @throws {Error} If the varint exceeds the safe-integer range or the
 *   input ends before a terminating byte (high bit clear) is seen
 */
export function readVarint(bytes, offset) {
  let value = 0;
  let shift = 0;
  let pos = offset;

  while (pos < bytes.length) {
    const byte = bytes[pos++];
    // 2 ** shift instead of `<< shift`: JS bitwise ops are 32-bit only.
    value += (byte & 0x7f) * 2 ** shift;
    if ((byte & 0x80) === 0) {
      if (!Number.isSafeInteger(value)) {
        throw new Error('Varint too long');
      }
      return [value, pos];
    }
    shift += 7;
    if (shift > 49) {
      // At most 7 continuation groups keeps the result within 2^53.
      throw new Error('Varint too long');
    }
  }

  throw new Error('Unexpected end of varint');
}
31
+
32
+
/**
 * Parse a CID from raw bytes.
 * Handles CIDv0 (starts with 0x12) and CIDv1 (starts with 0x01).
 * @param {Uint8Array} bytes - CID bytes
 * @param {number} offset - Starting offset
 * @returns {[Uint8Array, number]} - [CID bytes, new offset]
 */
function parseCidFromBytes(bytes, offset) {
  // CIDv0 is a bare sha2-256 multihash: 0x12 (hash fn) + 0x20 (32-byte
  // digest length) + digest, for a fixed total of 34 bytes.
  if (bytes[offset] === 0x12) {
    const end = offset + 34;
    return [bytes.slice(offset, end), end];
  }

  // CIDv1 layout: version varint + codec varint + multihash
  // (hash-fn varint + digest-length varint + digest bytes).
  let cursor = offset;

  const [version, versionEnd] = readVarint(bytes, cursor);
  if (version !== 1) {
    throw new Error(`Unsupported CID version: ${version}`);
  }
  cursor = versionEnd;

  const [, codecEnd] = readVarint(bytes, cursor);
  cursor = codecEnd;

  const [, hashFnEnd] = readVarint(bytes, cursor);
  cursor = hashFnEnd;

  const [digestLen, digestLenEnd] = readVarint(bytes, cursor);
  cursor = digestLenEnd + digestLen;

  return [bytes.slice(offset, cursor), cursor];
}
72
+
73
+
/**
74
+
* @typedef {Object} CarBlock
75
+
* @property {string} cid - Block CID as base32lower string
76
+
* @property {Uint8Array} data - Block data bytes
77
+
*/
78
+
79
+
/**
80
+
* @typedef {Object} ParsedCar
81
+
* @property {string[]} roots - Root CID strings
82
+
* @property {Map<string, Uint8Array>} blocks - Map of CID string to block data
83
+
*/
84
+
85
+
/**
 * Parse a complete CAR file into memory.
 *
 * Header validation and block scanning are delegated to iterateCarBlocks,
 * which previously duplicated this function's parsing logic line-for-line;
 * routing through the generator keeps the two code paths from drifting.
 * This function simply collects every yielded block into a Map.
 *
 * @param {Uint8Array} carBytes - Complete CAR file bytes
 * @returns {ParsedCar} - Parsed CAR with roots and blocks
 * @throws {Error} If the CAR version is unsupported or the bytes are malformed
 */
export function parseCarFile(carBytes) {
  /** @type {Map<string, Uint8Array>} */
  const blocks = new Map();

  // Drive the generator manually so we can capture its return value
  // (the roots); a for...of loop would discard it.
  const iter = iterateCarBlocks(carBytes);
  let step = iter.next();
  while (!step.done) {
    blocks.set(step.value.cid, step.value.data);
    step = iter.next();
  }

  return { roots: step.value.roots, blocks };
}
139
+
140
+
/**
 * Iterate over CAR file blocks (memory-efficient streaming).
 * @param {Uint8Array} carBytes - Complete CAR file bytes
 * @yields {CarBlock} - Block with CID and data
 * @returns {Generator<CarBlock, {roots: string[]}, unknown>}
 */
export function* iterateCarBlocks(carBytes) {
  // --- Header: varint length prefix followed by a CBOR-encoded map ---
  const [headerLen, headerStart] = readVarint(carBytes, 0);
  const header = cborDecode(carBytes.slice(headerStart, headerStart + headerLen));

  if (header.version !== 1) {
    throw new Error(`Unsupported CAR version: ${header.version}`);
  }

  // Roots arrive as raw CID bytes (DAG-CBOR tag 42); stringify them for
  // the generator's return value.
  const roots = (header.roots || []).map(
    (/** @type {Uint8Array} */ cidBytes) => cidToString(cidBytes),
  );

  // --- Blocks: repeated [varint length][CID][data] sections ---
  let cursor = headerStart + headerLen;
  while (cursor < carBytes.length) {
    const [blockLen, blockStart] = readVarint(carBytes, cursor);
    if (blockLen === 0) break;

    // blockLen covers the CID plus the data that follows it.
    const [cidBytes, dataStart] = parseCidFromBytes(carBytes, blockStart);
    const dataEnd = blockStart + blockLen;

    yield { cid: cidToString(cidBytes), data: carBytes.slice(dataStart, dataEnd) };
    cursor = dataEnd;
  }

  return { roots };
}
192
+
193
+
/**
 * Get CAR file header (roots) without parsing any blocks.
 * @param {Uint8Array} carBytes - CAR file bytes
 * @returns {{ version: number, roots: string[] }}
 */
export function getCarHeader(carBytes) {
  // The header is a varint length prefix followed by a CBOR-encoded map.
  const [headerLen, headerStart] = readVarint(carBytes, 0);
  const header = cborDecode(carBytes.slice(headerStart, headerStart + headerLen));

  // Roots are raw CID bytes (DAG-CBOR tag 42); convert each to its
  // string form.
  const roots = (header.roots || []).map(
    (/** @type {Uint8Array} */ cidBytes) => cidToString(cidBytes),
  );

  return { version: header.version, roots };
}
+2
packages/core/src/index.js
+2
packages/core/src/index.js
+184
packages/core/src/loader.js
+184
packages/core/src/loader.js
···
1
+
// @pds/core/loader - Repository loader from CAR files
2
+
// Loads AT Protocol repositories from CAR archives into storage
3
+
4
+
import { parseCarFile, getCarHeader } from './car.js';
5
+
import { cborDecode, cidToString } from './repo.js';
6
+
import { walkMst } from './mst.js';
7
+
8
+
/**
9
+
* @typedef {import('./ports.js').ActorStoragePort} ActorStoragePort
10
+
*/
11
+
12
+
/**
13
+
* @typedef {Object} CommitData
14
+
* @property {string} did - Repository DID
15
+
* @property {number} version - Commit version (usually 3)
16
+
* @property {string} rev - Revision string (TID)
17
+
* @property {string|null} prev - Previous commit CID
18
+
* @property {string|null} data - MST root CID
19
+
* @property {Uint8Array} [sig] - Signature bytes
20
+
*/
21
+
22
+
/**
23
+
* @typedef {Object} LoadResult
24
+
* @property {string} did - Repository DID
25
+
* @property {string} commitCid - Root commit CID
26
+
* @property {string} rev - Revision string
27
+
* @property {number} recordCount - Number of records loaded
28
+
* @property {number} blockCount - Number of blocks loaded
29
+
*/
30
+
31
+
/**
 * Load a repository from CAR bytes into storage.
 *
 * Ordering matters: all raw blocks are written first, then the DID, then
 * the individual records extracted from the MST, and finally the commit —
 * so the commit only lands once the data it references is in storage.
 * Records with malformed keys or missing blocks are skipped with a warning
 * rather than aborting the whole import.
 *
 * @param {Uint8Array} carBytes - CAR file bytes
 * @param {ActorStoragePort} actorStorage - Storage to populate
 * @returns {Promise<LoadResult>}
 * @throws {Error} If the CAR has no roots, the commit block is missing,
 *   the commit lacks a DID, or the commit version is not 2 or 3
 */
export async function loadRepositoryFromCar(carBytes, actorStorage) {
  // Parse the CAR file fully into memory (roots + cid->data map)
  const { roots, blocks } = parseCarFile(carBytes);

  if (roots.length === 0) {
    throw new Error('CAR file has no roots');
  }

  // By convention the first root is the signed commit block.
  const commitCid = roots[0];
  const commitBytes = blocks.get(commitCid);

  if (!commitBytes) {
    throw new Error(`Commit block not found: ${commitCid}`);
  }

  // Decode commit (DAG-CBOR map: did, rev, version, data, prev, sig)
  const commit = cborDecode(commitBytes);
  const did = commit.did;
  const rev = commit.rev;
  const version = commit.version;

  if (!did || typeof did !== 'string') {
    throw new Error('Invalid commit: missing DID');
  }

  // v3 is current; v2 accepted for older repos.
  if (version !== 3 && version !== 2) {
    throw new Error(`Unsupported commit version: ${version}`);
  }

  // Get MST root CID; `data` is null for an empty repo.
  const mstRootCid = commit.data ? cidToString(commit.data) : null;

  // Store all blocks first so later record/commit references resolve.
  for (const [cid, data] of blocks) {
    await actorStorage.putBlock(cid, data);
  }

  // Set repository identity metadata
  await actorStorage.setDid(did);

  // Walk MST and extract records into the record index
  let recordCount = 0;
  if (mstRootCid) {
    /**
     * Block lookup for walkMst, backed by the in-memory CAR block map.
     * @param {string} cid
     * @returns {Promise<Uint8Array|null>}
     */
    const getBlock = async (cid) => blocks.get(cid) || null;

    for await (const { key, cid } of walkMst(mstRootCid, getBlock)) {
      // key format: "collection/rkey"
      const slashIndex = key.indexOf('/');
      if (slashIndex === -1) {
        // Best-effort import: skip malformed keys instead of failing.
        console.warn(`Invalid record key format: ${key}`);
        continue;
      }

      const collection = key.slice(0, slashIndex);
      const rkey = key.slice(slashIndex + 1);
      const uri = `at://${did}/${collection}/${rkey}`;

      // Get record data; MST may reference blocks absent from this CAR.
      const recordBytes = blocks.get(cid);
      if (!recordBytes) {
        console.warn(`Record block not found: ${cid}`);
        continue;
      }

      // Store record under its full at:// URI
      await actorStorage.putRecord(uri, cid, collection, rkey, recordBytes);
      recordCount++;
    }
  }

  // Store commit with the next local sequence number.
  // NOTE(review): seq continues from any existing commit in storage —
  // assumes getLatestCommit() returns { seq } or null; confirm port contract.
  const prevCommit = await actorStorage.getLatestCommit();
  const seq = prevCommit ? prevCommit.seq + 1 : 1;
  await actorStorage.putCommit(seq, commitCid, rev, commit.prev ? cidToString(commit.prev) : null);

  return {
    did,
    commitCid,
    rev,
    recordCount,
    blockCount: blocks.size,
  };
}
124
+
125
+
/**
 * Get repository info from CAR without loading into storage.
 * @param {Uint8Array} carBytes - CAR file bytes
 * @returns {{ did: string, commitCid: string, rev: string }}
 */
export function getCarRepoInfo(carBytes) {
  const { roots, blocks } = parseCarFile(carBytes);

  if (roots.length === 0) {
    throw new Error('CAR file has no roots');
  }

  // The first root points at the signed commit block.
  const [commitCid] = roots;
  const commitBytes = blocks.get(commitCid);
  if (!commitBytes) {
    throw new Error(`Commit block not found: ${commitCid}`);
  }

  const { did, rev } = cborDecode(commitBytes);
  return { did, commitCid, rev };
}
152
+
153
+
/**
 * Validate CAR file structure without fully loading it into storage.
 * Never throws: all parse failures are folded into the result object.
 * @param {Uint8Array} carBytes - CAR file bytes
 * @returns {{ valid: boolean, error?: string, did?: string }}
 */
export function validateCarFile(carBytes) {
  try {
    const { version, roots } = getCarHeader(carBytes);

    if (version !== 1) {
      return { valid: false, error: `Unsupported CAR version: ${version}` };
    }

    if (roots.length === 0) {
      return { valid: false, error: 'CAR file has no roots' };
    }

    // Parse far enough to confirm the root commit decodes and names a DID.
    const info = getCarRepoInfo(carBytes);
    if (!info.did) {
      return { valid: false, error: 'Missing DID in commit' };
    }

    return { valid: true, did: info.did };
  } catch (err) {
    const message = err instanceof Error ? err.message : String(err);
    return { valid: false, error: message };
  }
}
+66
packages/core/src/pds.js
+66
packages/core/src/pds.js
···
310
310
* @param {string} [config.password] - Password for createSession
311
311
* @param {WebSocketPort} [config.webSocket] - WebSocket port for subscribeRepos
312
312
* @param {import('./ports.js').LexiconResolverPort} [config.lexiconResolver] - Optional lexicon resolver for record validation
313
+
* @param {boolean} [config.readOnly] - If true, reject all write operations with AuthenticationRequired
313
314
*/
314
315
constructor({
315
316
actorStorage,
···
323
324
password,
324
325
webSocket,
325
326
lexiconResolver,
327
+
readOnly,
326
328
}) {
327
329
this.actorStorage = actorStorage;
328
330
this.sharedStorage = sharedStorage;
···
335
337
this.password = password;
336
338
this.webSocket = webSocket;
337
339
this.lexiconResolver = lexiconResolver;
340
+
this.readOnly = readOnly ?? false;
338
341
this.lastCrawlNotify = 0;
339
342
340
343
// Cache for derived values
···
342
345
this._signingKey = null;
343
346
}
344
347
348
+
/**
349
+
* Return read-only rejection response
350
+
* @returns {Response}
351
+
*/
352
+
readOnlyError() {
353
+
return Response.json(
354
+
{ error: 'AuthenticationRequired', message: 'This PDS is read-only' },
355
+
{ status: 401 },
356
+
);
357
+
}
358
+
359
+
/**
360
+
* Check if endpoint is a write operation that should be blocked in read-only mode
361
+
* @param {string} pathname
362
+
* @returns {boolean}
363
+
*/
364
+
isWriteEndpoint(pathname) {
365
+
const writeEndpoints = [
366
+
'/init',
367
+
'/xrpc/com.atproto.server.createSession',
368
+
'/xrpc/com.atproto.server.refreshSession',
369
+
'/xrpc/com.atproto.repo.createRecord',
370
+
'/xrpc/com.atproto.repo.putRecord',
371
+
'/xrpc/com.atproto.repo.deleteRecord',
372
+
'/xrpc/com.atproto.repo.applyWrites',
373
+
'/xrpc/com.atproto.repo.uploadBlob',
374
+
'/xrpc/app.bsky.actor.putPreferences',
375
+
'/oauth/par',
376
+
'/oauth/token',
377
+
'/oauth/revoke',
378
+
];
379
+
return writeEndpoints.includes(pathname);
380
+
}
381
+
345
382
/**
346
383
* Get the PDS DID (cached)
347
384
* @returns {Promise<string|null>}
···
428
465
);
429
466
}
430
467
468
+
// Check read-only mode for write operations
469
+
if (this.readOnly && this.isWriteEndpoint(pathname)) {
470
+
return addCorsHeaders(this.readOnlyError());
471
+
}
472
+
431
473
// Check authentication if required
432
474
let auth = null;
433
475
if (route.auth) {
···
582
624
* @returns {Promise<Response>}
583
625
*/
584
626
async handleCreateSession(request) {
627
+
if (this.readOnly) return this.readOnlyError();
628
+
585
629
const { identifier, password } = await request.json();
586
630
587
631
// Verify password
···
648
692
* @returns {Promise<Response>}
649
693
*/
650
694
async handleRefreshSession(request) {
695
+
if (this.readOnly) return this.readOnlyError();
696
+
651
697
const authHeader = request.headers.get('authorization');
652
698
if (!authHeader) {
653
699
return Response.json(
···
743
789
* @returns {Promise<Response>}
744
790
*/
745
791
async handleCreateRecord(request, _url, auth) {
792
+
if (this.readOnly) return this.readOnlyError();
793
+
746
794
if (!auth) {
747
795
return Response.json(
748
796
{ error: 'AuthenticationRequired', message: 'Authentication required' },
···
948
996
* @returns {Promise<Response>}
949
997
*/
950
998
async handlePutRecord(request, _url, auth) {
999
+
if (this.readOnly) return this.readOnlyError();
1000
+
951
1001
if (!auth) {
952
1002
return Response.json(
953
1003
{ error: 'AuthenticationRequired', message: 'Authentication required' },
···
1080
1130
* @returns {Promise<Response>}
1081
1131
*/
1082
1132
async handleApplyWrites(request, _url, auth) {
1133
+
if (this.readOnly) return this.readOnlyError();
1134
+
1083
1135
if (!auth) {
1084
1136
return Response.json(
1085
1137
{ error: 'AuthenticationRequired', message: 'Authentication required' },
···
1298
1350
* @returns {Promise<Response>}
1299
1351
*/
1300
1352
async handleDeleteRecord(request, _url, auth) {
1353
+
if (this.readOnly) return this.readOnlyError();
1354
+
1301
1355
if (!auth) {
1302
1356
return Response.json(
1303
1357
{ error: 'AuthenticationRequired', message: 'Authentication required' },
···
1434
1488
* @returns {Promise<Response>}
1435
1489
*/
1436
1490
async handlePutPreferences(request, _url, auth) {
1491
+
if (this.readOnly) return this.readOnlyError();
1492
+
1437
1493
if (!auth) {
1438
1494
return Response.json(
1439
1495
{ error: 'AuthenticationRequired', message: 'Authentication required' },
···
1609
1665
* @returns {Promise<Response>}
1610
1666
*/
1611
1667
async handleOAuthPar(request, url) {
1668
+
if (this.readOnly) return this.readOnlyError();
1669
+
1612
1670
const issuer = `${url.protocol}//${url.host}`;
1613
1671
1614
1672
// Validate DPoP
···
1891
1949
* @returns {Promise<Response>}
1892
1950
*/
1893
1951
async handleOAuthToken(request, url) {
1952
+
if (this.readOnly) return this.readOnlyError();
1953
+
1894
1954
const issuer = `${url.protocol}//${url.host}`;
1895
1955
1896
1956
// Validate DPoP
···
2240
2300
* @returns {Promise<Response>}
2241
2301
*/
2242
2302
async handleOAuthRevoke(request) {
2303
+
if (this.readOnly) return this.readOnlyError();
2304
+
2243
2305
const contentType = request.headers.get('content-type') || '';
2244
2306
let params;
2245
2307
if (contentType.includes('application/json')) {
···
2627
2689
* @returns {Promise<Response>}
2628
2690
*/
2629
2691
async handleUploadBlob(request, _url, auth) {
2692
+
if (this.readOnly) return this.readOnlyError();
2693
+
2630
2694
if (!auth) {
2631
2695
return Response.json(
2632
2696
{ error: 'AuthenticationRequired', message: 'Authentication required' },
···
2895
2959
* @returns {Promise<Response>}
2896
2960
*/
2897
2961
async handleInit(request) {
2962
+
if (this.readOnly) return this.readOnlyError();
2963
+
2898
2964
const body = await request.json();
2899
2965
if (!body.did || !body.privateKey) {
2900
2966
return Response.json(
+242
packages/readonly/CLOUDFLARE_DEPLOY.md
+242
packages/readonly/CLOUDFLARE_DEPLOY.md
···
1
+
# Deploying Read-Only PDS to Cloudflare Workers
2
+
3
+
Deploy a read-only AT Protocol PDS to Cloudflare Workers with sample records.
4
+
5
+
## Prerequisites
6
+
7
+
- Node.js 20+, pnpm
8
+
- [Wrangler CLI](https://developers.cloudflare.com/workers/wrangler/install-and-update/)
9
+
- Cloudflare account with Workers and R2 enabled
10
+
11
+
## Step 1: Build the CAR File
12
+
13
+
```bash
14
+
cd pds.js
15
+
16
+
# Generate a signing key
17
+
node packages/readonly/src/build-car.js --generate-key
18
+
# Save the privateKey output
19
+
20
+
# Create sample records
21
+
cat > records.json << 'EOF'
22
+
[
23
+
{
24
+
"collection": "app.bsky.actor.profile",
25
+
"rkey": "self",
26
+
"record": {
27
+
"$type": "app.bsky.actor.profile",
28
+
"displayName": "Read-Only Archive",
29
+
"description": "A read-only archive served from Cloudflare Workers."
30
+
}
31
+
},
32
+
{
33
+
"collection": "app.bsky.feed.post",
34
+
"rkey": "3abc123def456",
35
+
"record": {
36
+
"$type": "app.bsky.feed.post",
37
+
"text": "Hello from my read-only PDS!",
38
+
"createdAt": "2024-01-15T12:00:00.000Z"
39
+
}
40
+
},
41
+
{
42
+
"collection": "app.bsky.feed.post",
43
+
"rkey": "3abc123def789",
44
+
"record": {
45
+
"$type": "app.bsky.feed.post",
46
+
"text": "The AT Protocol makes data portable!",
47
+
"createdAt": "2024-01-16T14:30:00.000Z"
48
+
}
49
+
}
50
+
]
51
+
EOF
52
+
53
+
# Build CAR (replace did and key with your values)
54
+
node packages/readonly/src/build-car.js \
55
+
-o repo.car \
56
+
-d did:plc:your-did-here \
57
+
-k <your-private-key-hex> \
58
+
-F records.json
59
+
```
60
+
61
+
## Step 2: Create Worker Project
62
+
63
+
```bash
64
+
mkdir cloudflare-pds && cd cloudflare-pds
65
+
mv ../repo.car .
66
+
```
67
+
68
+
Create `wrangler.toml`:
69
+
70
+
```toml
71
+
name = "readonly-pds"
72
+
main = "worker.js"
73
+
compatibility_date = "2024-01-01"
74
+
75
+
[[r2_buckets]]
76
+
binding = "REPO_BUCKET"
77
+
bucket_name = "pds-repos"
78
+
```
79
+
80
+
Create `worker.js`:
81
+
82
+
```javascript
83
+
// Read-Only PDS Worker - serves AT Protocol repo from R2-stored CAR file
84
+
// CAR/CBOR parsing adapted from @pds/core (see packages/core/src/car.js, repo.js)
85
+
86
+
export default {
87
+
async fetch(request, env) {
88
+
const url = new URL(request.url);
89
+
const cors = { 'Access-Control-Allow-Origin': '*', 'Access-Control-Allow-Methods': 'GET, OPTIONS' };
90
+
91
+
if (request.method === 'OPTIONS') return new Response(null, { headers: cors });
92
+
if (request.method !== 'GET') return json({ error: 'MethodNotAllowed' }, 405, cors);
93
+
94
+
try {
95
+
const repo = await loadRepo(env);
96
+
if (!repo) return json({ error: 'RepoNotFound' }, 404, cors);
97
+
98
+
const p = url.pathname;
99
+
const q = (k) => url.searchParams.get(k);
100
+
101
+
if (p === '/xrpc/com.atproto.sync.listRepos') {
102
+
return json({ repos: [{ did: repo.did, head: repo.commitCid, rev: repo.rev, active: true }] }, 200, cors);
103
+
}
104
+
if (p === '/xrpc/com.atproto.sync.getRepo' && q('did') === repo.did) {
105
+
return new Response(repo.carBytes, { headers: { ...cors, 'Content-Type': 'application/vnd.ipld.car' } });
106
+
}
107
+
if (p === '/xrpc/com.atproto.sync.getLatestCommit' && q('did') === repo.did) {
108
+
return json({ cid: repo.commitCid, rev: repo.rev }, 200, cors);
109
+
}
110
+
if (p === '/xrpc/com.atproto.repo.describeRepo' && q('repo') === repo.did) {
111
+
const collections = [...new Set([...repo.records.keys()].map(u => u.split('/')[3]))];
112
+
return json({ did: repo.did, handle: 'handle.invalid', collections, didDoc: {} }, 200, cors);
113
+
}
114
+
if (p === '/xrpc/com.atproto.repo.getRecord') {
115
+
const rec = repo.records.get(`at://${q('repo')}/${q('collection')}/${q('rkey')}`);
116
+
return rec ? json({ uri: rec.uri, cid: rec.cid, value: rec.value }, 200, cors) : json({ error: 'RecordNotFound' }, 404, cors);
117
+
}
118
+
if (p === '/xrpc/com.atproto.repo.listRecords') {
119
+
const prefix = `at://${q('repo')}/${q('collection')}/`;
120
+
const recs = [...repo.records.values()].filter(r => r.uri.startsWith(prefix)).sort((a, b) => b.uri.localeCompare(a.uri));
121
+
return json({ records: recs.slice(0, 50) }, 200, cors);
122
+
}
123
+
if (p.startsWith('/xrpc/')) return json({ error: 'AuthenticationRequired', message: 'Read-only PDS' }, 401, cors);
124
+
if (p === '/') return new Response('Read-Only PDS', { headers: cors });
125
+
return json({ error: 'NotFound' }, 404, cors);
126
+
} catch (e) {
127
+
return json({ error: 'InternalServerError', message: e.message }, 500, cors);
128
+
}
129
+
}
130
+
};
131
+
132
+
const json = (data, status, cors) => new Response(JSON.stringify(data), { status, headers: { ...cors, 'Content-Type': 'application/json' } });
133
+
134
+
let cached;
135
+
async function loadRepo(env) {
136
+
if (cached) return cached;
137
+
const obj = await env.REPO_BUCKET.get('repo.car');
138
+
if (!obj) return null;
139
+
const carBytes = new Uint8Array(await obj.arrayBuffer());
140
+
const { roots, blocks } = parseCar(carBytes);
141
+
const commit = cbor(blocks.get(roots[0]));
142
+
const records = new Map();
143
+
if (commit.data) walkMst(blocks, cidStr(commit.data), (k, v) => {
144
+
const val = blocks.get(v);
145
+
if (val) { const [c, r] = k.split('/'); records.set(`at://${commit.did}/${c}/${r}`, { uri: `at://${commit.did}/${c}/${r}`, cid: v, value: cbor(val) }); }
146
+
});
147
+
return cached = { did: commit.did, commitCid: roots[0], rev: commit.rev, records, carBytes };
148
+
}
149
+
150
+
// Minimal CAR parser (see @pds/core/src/car.js for full implementation)
151
+
function parseCar(bytes) {
152
+
let o = 0;
153
+
const varint = () => { let r = 0, s = 0; while (bytes[o] & 0x80) r |= (bytes[o++] & 0x7f) << s, s += 7; return r | (bytes[o++] << s); };
154
+
const hLen = varint(), h = cbor(bytes.slice(o, o += hLen));
155
+
const roots = h.roots.map(r => cidStr(r)), blocks = new Map();
156
+
while (o < bytes.length) {
157
+
const bLen = varint(), end = o + bLen;
158
+
if (bytes[o] === 1) { o++; const codec = varint(), hf = varint(), hl = varint(), hash = bytes.slice(o, o += hl);
159
+
blocks.set('b' + b32([1, ...enc(codec), ...enc(hf), ...enc(hl), ...hash]), bytes.slice(o, end)); }
160
+
o = end;
161
+
}
162
+
return { roots, blocks };
163
+
}
164
+
165
+
// Minimal CBOR decoder (see @pds/core/src/repo.js for full implementation)
166
+
function cbor(b) {
167
+
let o = 0;
168
+
const read = () => {
169
+
const i = b[o++], maj = i >> 5, add = i & 0x1f;
170
+
let v = add < 24 ? add : add === 24 ? b[o++] : add === 25 ? (b[o++] << 8) | b[o++] : add === 26 ? (b[o++] << 24 | b[o++] << 16 | b[o++] << 8 | b[o++]) >>> 0 : 0;
171
+
if (maj === 0) return v; if (maj === 1) return -1 - v;
172
+
if (maj === 2) { const r = b.slice(o, o + v); o += v; return r; }
173
+
if (maj === 3) { const r = new TextDecoder().decode(b.slice(o, o + v)); o += v; return r; }
174
+
if (maj === 4) { const a = []; for (let i = 0; i < v; i++) a.push(read()); return a; }
175
+
if (maj === 5) { const m = {}; for (let i = 0; i < v; i++) m[read()] = read(); return m; }
176
+
if (maj === 6) { const t = read(); return v === 42 ? { bytes: t.slice(1) } : t; }
177
+
if (maj === 7) return add === 20 ? false : add === 21 ? true : add === 22 ? null : undefined;
178
+
};
179
+
return read();
180
+
}
181
+
182
+
function walkMst(blocks, cid, cb) {
183
+
const n = cbor(blocks.get(cid)); if (!n?.e) return;
184
+
let key = '';
185
+
for (const e of n.e) {
186
+
key = key.slice(0, e.p || 0) + (e.k ? new TextDecoder().decode(e.k) : '');
187
+
if (e.l) walkMst(blocks, cidStr(e.l), cb);
188
+
if (e.v) cb(key, cidStr(e.v));
189
+
}
190
+
if (n.e.length && n.e[n.e.length - 1].t) walkMst(blocks, cidStr(n.e[n.e.length - 1].t), cb);
191
+
}
192
+
193
+
const cidStr = (c) => c?.bytes ? 'b' + b32(c.bytes) : String(c);
194
+
const b32 = (b) => { let bits = 0, val = 0, out = ''; const A = 'abcdefghijklmnopqrstuvwxyz234567';
195
+
for (const byte of b) { val = (val << 8) | byte; bits += 8; while (bits >= 5) out += A[(val >>> (bits -= 5)) & 31]; }
196
+
return bits > 0 ? out + A[(val << (5 - bits)) & 31] : out; };
197
+
const enc = (v) => { const b = []; while (v > 0x7f) b.push((v & 0x7f) | 0x80), v >>>= 7; b.push(v); return b; };
198
+
```
199
+
200
+
## Step 3: Deploy
201
+
202
+
```bash
203
+
# Create R2 bucket and upload CAR
204
+
wrangler r2 bucket create pds-repos
205
+
wrangler r2 object put pds-repos/repo.car --file repo.car
206
+
207
+
# Deploy worker
208
+
wrangler deploy
209
+
```
210
+
211
+
## Step 4: Test
212
+
213
+
```bash
214
+
BASE_URL="https://readonly-pds.your-account.workers.dev"
215
+
216
+
# List repos
217
+
curl "$BASE_URL/xrpc/com.atproto.sync.listRepos"
218
+
219
+
# Get record
220
+
curl "$BASE_URL/xrpc/com.atproto.repo.getRecord?repo=did:plc:your-did&collection=app.bsky.feed.post&rkey=3abc123def456"
221
+
222
+
# List posts
223
+
curl "$BASE_URL/xrpc/com.atproto.repo.listRecords?repo=did:plc:your-did&collection=app.bsky.feed.post"
224
+
225
+
# Write endpoint (returns 401)
226
+
curl -X POST "$BASE_URL/xrpc/com.atproto.repo.createRecord"
227
+
```
228
+
229
+
## Custom Domain (Optional)
230
+
231
+
Add to `wrangler.toml`:
232
+
233
+
```toml
234
+
routes = [{ pattern = "pds.yourdomain.com", custom_domain = true }]
235
+
```
236
+
237
+
## Limitations
238
+
239
+
- **No blobs** - Use R2 directly for media
240
+
- **Single repo** - Modify R2 path logic for multiple repos
241
+
- **No WebSocket** - `subscribeRepos` requires Durable Objects
242
+
- **Memory limit** - Keep CAR files under 10MB
- **No pagination** - `listRecords` returns at most 50 records and no cursor
+36
packages/readonly/Dockerfile
+36
packages/readonly/Dockerfile
···
1
+
# Read-only PDS image: runs packages/readonly/src/cli.js against CAR files
# and blob directories mounted at runtime (see docker-compose.yaml).
FROM node:20-slim

# Install pnpm
RUN npm install -g pnpm

WORKDIR /app

# Copy package files first for better caching
COPY package.json pnpm-lock.yaml* pnpm-workspace.yaml* ./
COPY packages/core/package.json ./packages/core/
COPY packages/storage-sqlite/package.json ./packages/storage-sqlite/
COPY packages/blobs-fs/package.json ./packages/blobs-fs/
COPY packages/readonly/package.json ./packages/readonly/

# Install dependencies
# NOTE(review): falls back to a non-frozen install when the lockfile is
# missing or stale -- best-effort, but it hides lockfile drift; confirm intended.
RUN pnpm install --frozen-lockfile || pnpm install

# Copy source code
COPY packages/core/ ./packages/core/
COPY packages/storage-sqlite/ ./packages/storage-sqlite/
COPY packages/blobs-fs/ ./packages/blobs-fs/
COPY packages/readonly/ ./packages/readonly/

# Create data directories (repos/blobs are typically bind-mounted over)
RUN mkdir -p /data/repos /data/blobs /data/db

# Set environment variables
ENV PDS_PORT=3000
ENV PDS_HOSTNAME=localhost

# Expose port
EXPOSE 3000

# Run the read-only PDS; CMD supplies default args and can be overridden
# at `docker run` to change paths/port.
ENTRYPOINT ["node", "packages/readonly/src/cli.js"]
CMD ["--data-dir", "/data/db", "--port", "3000"]
+32
packages/readonly/docker-compose.yaml
+32
packages/readonly/docker-compose.yaml
···
1
+
# Read-only PDS container: serves repositories imported from mounted CAR
# files. Repo/blob mounts are read-only; only the SQLite volume is writable.
services:
  pds-readonly:
    build:
      # Build context is the monorepo root so workspace packages resolve.
      context: ../..
      dockerfile: packages/readonly/Dockerfile
    ports:
      - "${PDS_PORT:-3000}:3000"
    volumes:
      # Mount CAR files as read-only
      - ${PDS_CAR_DIR:-./data/repos}:/repos:ro
      # Mount blobs as read-only (optional)
      - ${PDS_BLOBS_DIR:-./data/blobs}:/blobs:ro
      # Mount database directory (writable for SQLite)
      - pds-data:/data/db
    environment:
      - PDS_PORT=3000
      - PDS_HOSTNAME=${PDS_HOSTNAME:-localhost}
    # CLI args mirror the mount points above.
    command:
      - --car
      - /repos
      - --blobs
      - /blobs
      - --data-dir
      - /data/db
      - --port
      - "3000"
      - --hostname
      - ${PDS_HOSTNAME:-localhost}
    restart: unless-stopped

volumes:
  pds-data:
+22
packages/readonly/package.json
+22
packages/readonly/package.json
···
1
+
{
2
+
"name": "@pds/readonly",
3
+
"version": "0.8.0",
4
+
"type": "module",
5
+
"main": "./src/index.js",
6
+
"types": "./src/index.d.ts",
7
+
"bin": {
8
+
"pds-readonly": "./src/cli.js",
9
+
"build-car": "./src/build-car.js"
10
+
},
11
+
"exports": {
12
+
".": "./src/index.js"
13
+
},
14
+
"dependencies": {
15
+
"@pds/core": "workspace:*",
16
+
"@pds/storage-sqlite": "workspace:*",
17
+
"@pds/blobs-fs": "workspace:*"
18
+
},
19
+
"peerDependencies": {
20
+
"better-sqlite3": ">=9.0.0"
21
+
}
22
+
}
+361
packages/readonly/src/build-car.js
+361
packages/readonly/src/build-car.js
···
1
+
#!/usr/bin/env node
2
+
// @pds/readonly/build-car - Build CAR files for read-only PDS
3
+
// Creates a signed repository CAR file from records
4
+
5
+
import fs from 'node:fs';
6
+
import {
7
+
buildCarFile,
8
+
cborEncodeDagCbor,
9
+
CID,
10
+
cidToBytes,
11
+
cidToString,
12
+
createCid,
13
+
createTid,
14
+
} from '@pds/core/repo';
15
+
import { buildMst } from '@pds/core';
16
+
import { importPrivateKey, sign, generateKeyPair, hexToBytes, bytesToHex } from '@pds/core/crypto';
17
+
18
+
/**
19
+
* @typedef {Object} RecordInput
20
+
* @property {string} collection - Collection NSID (e.g., "app.bsky.feed.post")
21
+
* @property {string} rkey - Record key
22
+
* @property {object} record - Record data
23
+
*/
24
+
25
+
/**
 * Build a signed repository CAR file from records.
 *
 * Steps: encode each record to DAG-CBOR, build the MST over
 * "collection/rkey" keys, create and sign a v3 commit, then pack
 * everything into a CAR with the commit as the single root.
 *
 * @param {Object} options
 * @param {string} options.did - Repository DID
 * @param {Uint8Array} options.privateKey - 32-byte P-256 private key
 * @param {RecordInput[]} options.records - Records to include
 * @param {string} [options.prevCommitCid] - Previous commit CID (for chaining)
 * @returns {Promise<{carBytes: Uint8Array, commitCid: string, rev: string}>}
 */
export async function buildRepositoryCar({ did, privateKey, records, prevCommitCid }) {
  /** @type {Array<{cid: string, data: Uint8Array}>} */
  const blocks = [];

  // Encode records and build MST entries
  /** @type {Array<{key: string, cid: string}>} */
  const mstEntries = [];

  for (const { collection, rkey, record } of records) {
    const recordBytes = cborEncodeDagCbor(record);
    const recordCid = cidToString(await createCid(recordBytes));
    blocks.push({ cid: recordCid, data: recordBytes });
    mstEntries.push({ key: `${collection}/${rkey}`, cid: recordCid });
  }

  // Sort entries in plain code-unit order, which equals byte order for the
  // ASCII keys used by atproto. (localeCompare applies locale-dependent
  // collation and could yield a different, non-deterministic tree shape
  // depending on the runtime's locale.)
  mstEntries.sort((a, b) => (a.key < b.key ? -1 : a.key > b.key ? 1 : 0));

  // Build MST; intermediate tree nodes are appended to `blocks` via callback.
  const mstRoot = await buildMst(mstEntries, async (cid, data) => {
    blocks.push({ cid, data });
  });

  // Create revision ID
  const rev = createTid();

  // Build unsigned commit; `data` is null for an empty repo.
  const commit = {
    did,
    version: 3,
    rev,
    prev: prevCommitCid ? new CID(cidToBytes(prevCommitCid)) : null,
    data: mstRoot ? new CID(cidToBytes(mstRoot)) : null,
  };

  // Sign the DAG-CBOR encoding of the unsigned commit, then attach `sig`.
  const signingKey = await importPrivateKey(privateKey);
  const commitBytes = cborEncodeDagCbor(commit);
  const sig = await sign(signingKey, commitBytes);
  const signedCommit = { ...commit, sig };

  // Encode signed commit; its CID becomes the CAR root.
  const signedCommitBytes = cborEncodeDagCbor(signedCommit);
  const commitCid = cidToString(await createCid(signedCommitBytes));
  blocks.push({ cid: commitCid, data: signedCommitBytes });

  // Build CAR file
  const carBytes = buildCarFile(commitCid, blocks);

  return { carBytes, commitCid, rev };
}
85
+
86
+
/**
 * Parse command line arguments
 * @returns {{
 *   output: string,
 *   did: string,
 *   privateKey: Uint8Array | null,
 *   generateKey: boolean,
 *   collection: string | null,
 *   rkey: string | null,
 *   record: object | null,
 *   recordsFile: string | null,
 *   help: boolean
 * }}
 */
function parseArgs() {
  const args = process.argv.slice(2);

  const opts = {
    output: '',
    did: '',
    /** @type {Uint8Array | null} */
    privateKey: null,
    generateKey: false,
    /** @type {string | null} */
    collection: null,
    /** @type {string | null} */
    rkey: null,
    /** @type {object | null} */
    record: null,
    /** @type {string | null} */
    recordsFile: null,
    help: false,
  };

  // Resolve a --key value: either a 64-char hex string, or a path to a file
  // holding raw 32-byte key material or hex text. Exits on a bad value.
  const resolveKey = (keyArg) => {
    // Support hex-encoded key or file path
    if (keyArg.length === 64 && /^[0-9a-fA-F]+$/.test(keyArg)) {
      return hexToBytes(keyArg);
    }
    if (fs.existsSync(keyArg)) {
      const keyData = fs.readFileSync(keyArg);
      // Support raw binary or hex file
      if (keyData.length === 32) {
        return new Uint8Array(keyData);
      }
      return hexToBytes(keyData.toString('utf-8').trim());
    }
    console.error(`Error: Key file not found or invalid hex: ${keyArg}`);
    process.exit(1);
  };

  for (let i = 0; i < args.length; i++) {
    const arg = args[i];

    switch (arg) {
      case '--output':
      case '-o':
        opts.output = args[++i];
        break;
      case '--did':
      case '-d':
        opts.did = args[++i];
        break;
      case '--key':
      case '-k': {
        const keyArg = args[++i];
        if (keyArg) {
          opts.privateKey = resolveKey(keyArg);
        }
        break;
      }
      case '--generate-key':
      case '-g':
        opts.generateKey = true;
        break;
      case '--collection':
      case '-c':
        opts.collection = args[++i];
        break;
      case '--rkey':
      case '-r':
        opts.rkey = args[++i];
        break;
      case '--record':
      case '-R': {
        const recordArg = args[++i];
        if (recordArg) {
          try {
            opts.record = JSON.parse(recordArg);
          } catch (err) {
            console.error(`Error: Invalid JSON for record: ${err}`);
            process.exit(1);
          }
        }
        break;
      }
      case '--record-file':
      case '-f': {
        const filePath = args[++i];
        if (filePath && fs.existsSync(filePath)) {
          try {
            opts.record = JSON.parse(fs.readFileSync(filePath, 'utf-8'));
          } catch (err) {
            console.error(`Error: Failed to parse record file: ${err}`);
            process.exit(1);
          }
        } else {
          console.error(`Error: Record file not found: ${filePath}`);
          process.exit(1);
        }
        break;
      }
      case '--records':
      case '-F':
        opts.recordsFile = args[++i];
        break;
      case '--help':
      case '-h':
        opts.help = true;
        break;
      default:
        // Unknown flags are fatal; bare positionals are ignored.
        if (arg.startsWith('-')) {
          console.error(`Unknown option: ${arg}`);
          process.exit(1);
        }
    }
  }

  return opts;
}
185
+
186
+
// Print CLI usage: options, input formats for single/multi record modes,
// key-generation notes, and worked examples. Writes to stdout only.
function printHelp() {
  console.log(`
build-car - Build a CAR file for read-only PDS

Usage:
  build-car [options]

Options:
  --output, -o <path>       Output CAR file path (required)
  --did, -d <did>           Repository DID (required)
  --key, -k <key>           Private key (hex string or file path)
  --generate-key, -g        Generate a new key pair and print to stdout
  --collection, -c <nsid>   Collection NSID for single record mode
  --rkey, -r <rkey>         Record key for single record mode
  --record, -R <json>       Record JSON for single record mode
  --record-file, -f <path>  Path to JSON file containing record
  --records, -F <path>      Path to JSON file containing array of records
  --help, -h                Show this help message

Single Record Mode:
  build-car -o repo.car -d did:plc:abc123 -k <key> \\
    -c app.bsky.feed.post -r 3abc123 -R '{"text":"Hello"}'

Multiple Records Mode (JSON file format):
  [
    {"collection": "app.bsky.feed.post", "rkey": "3abc", "record": {"text": "Post 1"}},
    {"collection": "app.bsky.feed.post", "rkey": "3def", "record": {"text": "Post 2"}}
  ]

  build-car -o repo.car -d did:plc:abc123 -k <key> -F records.json

Key Generation:
  build-car --generate-key
  # Outputs: privateKey=<hex> publicKey=<hex>

Examples:
  # Generate a new key
  build-car -g > keys.txt

  # Create a CAR with a single post
  build-car -o my-repo.car -d did:plc:abc123 -k private.key \\
    -c app.bsky.feed.post -r 3abc123def \\
    -R '{"$type":"app.bsky.feed.post","text":"Hello World!","createdAt":"2024-01-01T00:00:00Z"}'

  # Create a CAR from a records file
  build-car -o my-repo.car -d did:plc:abc123 -k <hex-key> -F records.json
`);
}
234
+
235
+
// CLI entry point: parse args, handle --help / --generate-key short-circuits,
// validate required options, assemble the record list (single-record flags or
// a --records JSON file), then build and write the CAR. Exits non-zero on any
// validation or build failure; all user feedback goes to stdout/stderr.
async function main() {
  const {
    output,
    did,
    privateKey,
    generateKey,
    collection,
    rkey,
    record,
    recordsFile,
    help,
  } = parseArgs();

  if (help) {
    printHelp();
    process.exit(0);
  }

  // Handle key generation: print the pair and exit without building anything.
  if (generateKey) {
    const { privateKey: privKey, publicKey: pubKey } = await generateKeyPair();
    console.log(`privateKey=${bytesToHex(privKey)}`);
    console.log(`publicKey=${bytesToHex(pubKey)}`);
    console.log(`# Save the privateKey securely. Use it with --key option.`);
    process.exit(0);
  }

  // Validate required arguments (each failure prints help and exits 1)
  if (!output) {
    console.error('Error: --output is required');
    printHelp();
    process.exit(1);
  }

  if (!did) {
    console.error('Error: --did is required');
    printHelp();
    process.exit(1);
  }

  if (!privateKey) {
    console.error('Error: --key is required (or use --generate-key to create one)');
    printHelp();
    process.exit(1);
  }

  // Build records array: either a JSON file with an array of records, or a
  // single record assembled from --collection/--rkey/--record.
  /** @type {RecordInput[]} */
  let records = [];

  if (recordsFile) {
    // Multiple records from file
    if (!fs.existsSync(recordsFile)) {
      console.error(`Error: Records file not found: ${recordsFile}`);
      process.exit(1);
    }
    try {
      const data = JSON.parse(fs.readFileSync(recordsFile, 'utf-8'));
      if (!Array.isArray(data)) {
        console.error('Error: Records file must contain an array');
        process.exit(1);
      }
      records = data;
    } catch (err) {
      console.error(`Error: Failed to parse records file: ${err}`);
      process.exit(1);
    }
  } else if (collection && rkey && record) {
    // Single record mode
    records = [{ collection, rkey, record }];
  } else {
    console.error('Error: Either provide --collection, --rkey, and --record for single record,');
    console.error('       or --records for multiple records from a file.');
    printHelp();
    process.exit(1);
  }

  if (records.length === 0) {
    console.error('Error: No records provided');
    process.exit(1);
  }

  // Validate records: every entry needs all three fields before building.
  for (const r of records) {
    if (!r.collection || !r.rkey || !r.record) {
      console.error('Error: Each record must have collection, rkey, and record fields');
      process.exit(1);
    }
  }

  console.log(`Building CAR file for ${did}...`);
  console.log(`Records: ${records.length}`);

  try {
    // NOTE(review): prevCommitCid is never passed here, so every CAR built by
    // this CLI starts a fresh commit chain — confirm that is intended.
    const { carBytes, commitCid, rev } = await buildRepositoryCar({
      did,
      privateKey,
      records,
    });

    fs.writeFileSync(output, carBytes);

    console.log(`\nCAR file written: ${output}`);
    console.log(`Commit CID: ${commitCid}`);
    console.log(`Revision: ${rev}`);
    console.log(`Size: ${carBytes.length} bytes`);
    console.log(`\nRecords included:`);
    for (const r of records) {
      console.log(`  ${r.collection}/${r.rkey}`);
    }
  } catch (err) {
    console.error(`Error building CAR: ${err}`);
    process.exit(1);
  }
}
350
+
351
+
// Run the CLI only when this file is executed directly (not when imported
// for its buildRepositoryCar export).
const entry = process.argv[1];
const runAsCli =
  import.meta.url === `file://${entry}` ||
  entry?.endsWith('/build-car.js') ||
  entry?.endsWith('/build-car');

if (runAsCli) {
  main().catch((err) => {
    console.error('Fatal error:', err);
    process.exit(1);
  });
}
+302
packages/readonly/src/cli.js
+302
packages/readonly/src/cli.js
···
1
+
#!/usr/bin/env node
2
+
// @pds/readonly CLI - Run a read-only PDS server from CAR files
3
+
4
+
import { createServer } from 'node:http';
5
+
import { createReadOnlyServer } from './index.js';
6
+
import path from 'node:path';
7
+
import fs from 'node:fs';
8
+
import Database from 'better-sqlite3';
9
+
10
+
/**
 * Parse command line arguments
 *
 * Flags take precedence over environment variables for CAR files; for port
 * and hostname the environment variables win (they are applied last).
 * @returns {{ carFiles: string[], blobsDir: string | null, port: number, hostname: string, dataDir: string }}
 */
function parseArgs() {
  const args = process.argv.slice(2);
  /** @type {string[]} */
  const carFiles = [];
  let blobsDir = null;
  let port = 3000;
  let hostname = 'localhost';
  let dataDir = './pds-data';

  for (let i = 0; i < args.length; i++) {
    const arg = args[i];

    if (arg === '--car' || arg === '-c') {
      const value = args[++i];
      if (!value) {
        console.error('Error: --car requires a path');
        process.exit(1);
      }
      // Support glob patterns (Node.js 22+)
      if (value.includes('*')) {
        try {
          // fs.globSync only exists on Node.js 22+; fall back gracefully
          const { globSync } = fs;
          if (typeof globSync === 'function') {
            carFiles.push(...globSync(value));
          } else {
            console.error('Warning: Glob patterns not supported in this Node.js version, please specify files individually');
            carFiles.push(value);
          }
        } catch {
          console.error('Warning: Glob patterns not supported, please specify files individually');
          carFiles.push(value);
        }
      } else {
        carFiles.push(value);
      }
    } else if (arg === '--blobs' || arg === '-b') {
      blobsDir = args[++i];
      if (!blobsDir) {
        console.error('Error: --blobs requires a path');
        process.exit(1);
      }
    } else if (arg === '--port' || arg === '-p') {
      port = parseInt(args[++i], 10);
      if (isNaN(port)) {
        console.error('Error: --port requires a number');
        process.exit(1);
      }
    } else if (arg === '--hostname' || arg === '-H') {
      hostname = args[++i];
      if (!hostname) {
        console.error('Error: --hostname requires a value');
        process.exit(1);
      }
    } else if (arg === '--data-dir' || arg === '-d') {
      dataDir = args[++i];
      if (!dataDir) {
        console.error('Error: --data-dir requires a path');
        process.exit(1);
      }
    } else if (arg === '--help' || arg === '-h') {
      printHelp();
      process.exit(0);
    } else if (arg === '--version' || arg === '-v') {
      const pkg = JSON.parse(fs.readFileSync(new URL('../package.json', import.meta.url), 'utf-8'));
      console.log(pkg.version);
      process.exit(0);
    } else if (arg.startsWith('-')) {
      console.error(`Unknown option: ${arg}`);
      printHelp();
      process.exit(1);
    } else {
      // Positional arguments treated as CAR files
      carFiles.push(arg);
    }
  }

  // PDS_CAR_DIR: used only when no CAR files were given on the command line
  const carDir = process.env.PDS_CAR_DIR;
  if (carDir && carFiles.length === 0) {
    const files = fs.readdirSync(carDir).filter((f) => f.endsWith('.car'));
    carFiles.push(...files.map((f) => path.join(carDir, f)));
  }

  // PDS_BLOBS_DIR: fallback when --blobs was not given
  if (!blobsDir && process.env.PDS_BLOBS_DIR) {
    blobsDir = process.env.PDS_BLOBS_DIR;
  }

  // PDS_PORT: validate like --port instead of silently accepting NaN,
  // which would previously make httpServer.listen() fail obscurely.
  if (process.env.PDS_PORT) {
    const envPort = parseInt(process.env.PDS_PORT, 10);
    if (isNaN(envPort)) {
      console.error('Error: PDS_PORT must be a number');
      process.exit(1);
    }
    port = envPort;
  }

  // PDS_HOSTNAME overrides --hostname
  if (process.env.PDS_HOSTNAME) {
    hostname = process.env.PDS_HOSTNAME;
  }

  return { carFiles, blobsDir, port, hostname, dataDir };
}
115
+
116
+
// Print server CLI usage: options, supported environment variables, and
// worked examples. Writes to stdout only.
function printHelp() {
  console.log(`
pds-readonly - Run a read-only PDS server from CAR files

Usage:
  pds-readonly [options] [car-files...]

Options:
  --car, -c <path>        Path to CAR file (can be specified multiple times)
  --blobs, -b <path>      Path to blobs directory (structure: blobs/{did}/{shard}/{cid})
  --port, -p <number>     Port to listen on (default: 3000)
  --hostname, -H <host>   PDS hostname for DID resolution (default: localhost)
  --data-dir, -d <path>   Directory for SQLite databases (default: ./pds-data)
  --help, -h              Show this help message
  --version, -v           Show version

Environment Variables:
  PDS_PORT       Port to listen on
  PDS_CAR_DIR    Directory containing CAR files
  PDS_BLOBS_DIR  Blobs directory path
  PDS_HOSTNAME   PDS hostname

Examples:
  # Load single repository
  pds-readonly --car ./repo.car --port 3000

  # Load multiple repositories
  pds-readonly --car ./alice.car --car ./bob.car

  # With blobs support
  pds-readonly --car ./repo.car --blobs ./blobs

  # Using environment variables
  PDS_CAR_DIR=./repos PDS_PORT=8080 pds-readonly
`);
}
152
+
153
+
// Server entry point: validate inputs, load all CAR files, then bridge
// Node's http server to the PDS's WHATWG fetch-style handler.
async function main() {
  const { carFiles, blobsDir, port, hostname, dataDir } = parseArgs();

  if (carFiles.length === 0) {
    console.error('Error: At least one CAR file is required');
    console.error('Use --help for usage information');
    process.exit(1);
  }

  // Validate CAR files exist before touching any databases
  for (const carFile of carFiles) {
    if (!fs.existsSync(carFile)) {
      console.error(`Error: CAR file not found: ${carFile}`);
      process.exit(1);
    }
  }

  // A missing blobs directory is a warning, not an error: the server still
  // serves records; blob requests will 404.
  if (blobsDir && !fs.existsSync(blobsDir)) {
    console.warn(`Warning: Blobs directory not found: ${blobsDir}`);
    console.warn('Blob requests will return 404');
  }

  console.log('Starting read-only PDS server...');
  console.log(`Data directory: ${dataDir}`);
  console.log(`Hostname: ${hostname}`);
  if (blobsDir) {
    console.log(`Blobs directory: ${blobsDir}`);
  }

  // Create the read-only server
  const { server, loadCar } = createReadOnlyServer({
    hostname,
    dataDir,
    blobsDir: blobsDir || undefined,
    createDb: (dbPath) => new Database(dbPath),
  });

  // Load all CAR files; any single failure aborts startup
  console.log(`\nLoading ${carFiles.length} CAR file(s)...`);
  for (const carFile of carFiles) {
    try {
      console.log(`Loading: ${carFile}`);
      await loadCar(carFile);
    } catch (err) {
      console.error(`Error loading ${carFile}:`, err.message);
      process.exit(1);
    }
  }

  // HTTP bridge: convert IncomingMessage -> Request, Response -> ServerResponse
  const httpServer = createServer(async (req, res) => {
    try {
      const url = new URL(req.url || '/', `http://${hostname}:${port}`);
      const headers = new Headers();
      for (const [key, value] of Object.entries(req.headers)) {
        if (value) {
          headers.set(key, Array.isArray(value) ? value.join(', ') : value);
        }
      }

      // Only read a body for methods that may carry one
      const body = req.method !== 'GET' && req.method !== 'HEAD'
        ? await readBody(req)
        : undefined;

      const request = new Request(url.toString(), {
        method: req.method,
        headers,
        body,
      });

      // Handle request
      const response = await server.fetch(request);

      res.statusCode = response.status;
      for (const [key, value] of response.headers) {
        res.setHeader(key, value);
      }

      if (response.body) {
        // Stream the body respecting backpressure: when write() returns
        // false, wait for 'drain' before pulling the next chunk. (The old
        // recursive pump ignored write()'s return value and could buffer
        // an entire large response in memory.)
        for await (const chunk of response.body) {
          if (!res.write(chunk)) {
            await new Promise((resolve) => res.once('drain', resolve));
          }
        }
        res.end();
      } else {
        const text = await response.text();
        res.end(text);
      }
    } catch (err) {
      console.error('Request error:', err);
      // statusCode can only be changed before headers are flushed
      if (!res.headersSent) {
        res.statusCode = 500;
      }
      res.end(JSON.stringify({ error: 'InternalServerError', message: err.message }));
    }
  });

  httpServer.listen(port, () => {
    console.log(`\nRead-only PDS server running at http://localhost:${port}`);
    console.log('\nAvailable endpoints:');
    console.log(`  GET /xrpc/com.atproto.sync.listRepos`);
    console.log(`  GET /xrpc/com.atproto.sync.getRepo?did=<did>`);
    console.log(`  GET /xrpc/com.atproto.repo.listRecords?repo=<did>&collection=<nsid>`);
    console.log(`  GET /xrpc/com.atproto.repo.getRecord?repo=<did>&collection=<nsid>&rkey=<rkey>`);
    console.log('\nWrite operations return 401 (AuthenticationRequired)');
  });

  // Handle shutdown
  process.on('SIGINT', () => {
    console.log('\nShutting down...');
    httpServer.close();
    process.exit(0);
  });

  process.on('SIGTERM', () => {
    console.log('\nShutting down...');
    httpServer.close();
    process.exit(0);
  });
}
280
+
281
+
/**
 * Read the full request body into a Uint8Array.
 * @param {import('node:http').IncomingMessage} req
 * @returns {Promise<Uint8Array>}
 */
function readBody(req) {
  return new Promise((resolve, reject) => {
    /** @type {Buffer[]} */
    const parts = [];
    req.on('error', reject);
    req.on('data', (part) => {
      parts.push(part);
    });
    req.on('end', () => {
      // View the concatenated buffer without copying it again
      const combined = Buffer.concat(parts);
      resolve(new Uint8Array(combined.buffer, combined.byteOffset, combined.byteLength));
    });
  });
}
298
+
299
+
// Entry point: run the CLI and exit non-zero on any unhandled failure.
main().catch((err) => {
  console.error('Fatal error:', err);
  process.exit(1);
});
+329
packages/readonly/src/index.js
+329
packages/readonly/src/index.js
···
1
+
// @pds/readonly - Read-only PDS server for serving AT Protocol repositories
2
+
// Loads repositories from CAR files and serves read-only XRPC endpoints
3
+
4
+
import { PersonalDataServer, loadRepositoryFromCar } from '@pds/core';
5
+
import { createActorStorage, createSharedStorage } from '@pds/storage-sqlite';
6
+
import { createFsBlobs } from '@pds/blobs-fs';
7
+
import fs from 'node:fs';
8
+
import path from 'node:path';
9
+
10
+
/**
11
+
* @typedef {import('@pds/storage-sqlite/driver').SQLiteDatabase} SQLiteDatabase
12
+
*/
13
+
14
+
/**
15
+
* @typedef {Object} RepoInfo
16
+
* @property {string} did - Repository DID
17
+
* @property {string} commitCid - Commit CID
18
+
* @property {string} rev - Revision string
19
+
* @property {number} recordCount - Number of records
20
+
* @property {number} blockCount - Number of blocks
21
+
*/
22
+
23
+
/**
24
+
* @typedef {Object} MultiRepoManagerOptions
25
+
* @property {string} dataDir - Directory for SQLite databases
26
+
* @property {string} [blobsDir] - Optional directory for blobs
27
+
* @property {(did: string) => SQLiteDatabase} createDb - Database factory function
28
+
*/
29
+
30
+
/**
 * Multi-repository manager for read-only PDS
 * Manages per-DID SQLite databases with a routing layer
 */
export class MultiRepoManager {
  /**
   * @param {MultiRepoManagerOptions} options
   */
  constructor({ dataDir, blobsDir, createDb }) {
    /** @type {Map<string, {db: SQLiteDatabase, storage: import('@pds/core/ports').ActorStoragePort}>} */
    this.repos = new Map();
    this.dataDir = dataDir;
    this.blobsDir = blobsDir;
    this.createDb = createDb;
    /** @type {RepoInfo[]} */
    this.loadedRepos = [];
  }

  /**
   * Load a repository from a CAR file.
   *
   * The CAR is parsed twice: first into a throwaway in-memory database just
   * to discover the repository DID, then into the per-DID persistent store.
   * @param {string} carPath - Path to CAR file
   * @returns {Promise<RepoInfo>}
   */
  async loadCar(carPath) {
    const carBytes = fs.readFileSync(carPath);

    // Create temporary storage to extract DID first. Close the handle when
    // done so the in-memory database is released even if parsing throws
    // (previously this handle leaked on every loadCar call).
    const tempDb = this.createDb(':memory:');
    let result;
    try {
      const tempStorage = createActorStorage(tempDb);
      result = await loadRepositoryFromCar(new Uint8Array(carBytes), tempStorage);
    } finally {
      tempDb.close?.();
    }
    const did = result.did;

    // Check if already loaded
    if (this.repos.has(did)) {
      console.log(`Repository ${did} already loaded, skipping`);
      return result;
    }

    // Create persistent database for this DID; sanitize the DID so it is a
    // safe filename (':' and '.' both become '_')
    const dbFileName = did.replace(/[:.]/g, '_') + '.sqlite';
    const dbPath = path.join(this.dataDir, dbFileName);
    const db = this.createDb(dbPath);
    const storage = createActorStorage(db);

    // Load repository into persistent storage
    const loadResult = await loadRepositoryFromCar(new Uint8Array(carBytes), storage);

    // Register
    this.repos.set(did, { db, storage });
    this.loadedRepos.push(loadResult);

    console.log(
      `Loaded repository ${did}: ${loadResult.recordCount} records, ${loadResult.blockCount} blocks`,
    );

    return loadResult;
  }

  /**
   * Get storage for a specific DID
   * @param {string} did
   * @returns {import('@pds/core/ports').ActorStoragePort | null}
   */
  getStorage(did) {
    const repo = this.repos.get(did);
    return repo?.storage || null;
  }

  /**
   * Get all loaded DIDs
   * @returns {string[]}
   */
  getLoadedDids() {
    return Array.from(this.repos.keys());
  }

  /**
   * Get info about all loaded repositories
   * @returns {RepoInfo[]}
   */
  getLoadedRepos() {
    return this.loadedRepos;
  }
}
116
+
117
+
/**
118
+
* @typedef {Object} ReadOnlyServerOptions
119
+
* @property {string} hostname - PDS hostname
120
+
* @property {string} [jwtSecret] - JWT secret (optional for read-only)
121
+
* @property {string} dataDir - Directory for SQLite databases
122
+
* @property {string} [blobsDir] - Optional directory for blobs
123
+
* @property {(did: string) => SQLiteDatabase} createDb - Database factory function
124
+
*/
125
+
126
+
/**
 * Create a read-only PDS server with multi-repo support
 * @param {ReadOnlyServerOptions} options
 * @returns {{ server: PersonalDataServer, manager: MultiRepoManager, loadCar: (path: string) => Promise<RepoInfo> }}
 */
export function createReadOnlyServer(options) {
  const {
    hostname,
    jwtSecret = 'readonly-jwt-secret',
    dataDir,
    blobsDir,
    createDb,
  } = options;

  // Make sure the SQLite data directory exists before opening any database
  if (!fs.existsSync(dataDir)) {
    fs.mkdirSync(dataDir, { recursive: true });
  }

  // Per-DID repositories are tracked by the manager
  const manager = new MultiRepoManager({ dataDir, blobsDir, createDb });

  // Account-level state shared across repositories lives in its own database
  const sharedDb = createDb(path.join(dataDir, 'shared.sqlite'));
  const sharedStorage = createSharedStorage(sharedDb);

  // Serve blobs from disk when a directory was given; otherwise stub them out
  const blobs = blobsDir ? createFsBlobs(blobsDir) : createNullBlobStorage();

  // All actor-storage calls go through a routing layer that picks the
  // correct per-DID repository
  const routingStorage = createRoutingStorage(manager);

  // Assemble the PDS in read-only mode
  const server = new PersonalDataServer({
    actorStorage: routingStorage,
    sharedStorage,
    blobs,
    jwtSecret,
    hostname,
    readOnly: true,
  });

  return {
    server,
    manager,
    loadCar: (carPath) => manager.loadCar(carPath),
  };
}
170
+
171
+
/**
 * Create a routing storage that delegates to the correct repo based on DID
 * @param {MultiRepoManager} manager
 * @returns {import('@pds/core/ports').ActorStoragePort}
 */
function createRoutingStorage(manager) {
  // Default to first loaded repo for single-repo compatibility
  const getDefaultStorage = () => {
    const dids = manager.getLoadedDids();
    if (dids.length === 0) return null;
    return manager.getStorage(dids[0]);
  };

  /**
   * Reject a write operation with the standard read-only error.
   * Centralized so every write path throws an identical message shape.
   * @param {string} op - Name of the attempted operation
   * @returns {never}
   */
  const rejectWrite = (op) => {
    throw new Error(`Read-only: ${op} not allowed`);
  };

  return {
    // Route by the DID embedded in the AT-URI; fall back to the first repo
    async getRecord(uri) {
      // Extract DID from uri: at://did:xxx/collection/rkey
      const did = uri.replace('at://', '').split('/')[0];
      const storage = manager.getStorage(did) || getDefaultStorage();
      return storage?.getRecord(uri) || null;
    },

    // NOTE(review): the list/commit/event/blob reads below always hit the
    // first loaded repo — per-DID routing for these is not implemented yet.
    async listRecords(collection, cursor, limit) {
      const storage = getDefaultStorage();
      return storage?.listRecords(collection, cursor, limit) || { records: [], cursor: null };
    },

    async listAllRecords() {
      const storage = getDefaultStorage();
      return storage?.listAllRecords() || [];
    },

    async putRecord() {
      rejectWrite('putRecord');
    },

    async deleteRecord() {
      rejectWrite('deleteRecord');
    },

    // Blocks may belong to any loaded repo, so scan them all
    async getBlock(cid) {
      for (const did of manager.getLoadedDids()) {
        const storage = manager.getStorage(did);
        const block = await storage?.getBlock(cid);
        if (block) return block;
      }
      return null;
    },

    async putBlock() {
      rejectWrite('putBlock');
    },

    async getLatestCommit() {
      const storage = getDefaultStorage();
      return storage?.getLatestCommit() || null;
    },

    async putCommit() {
      rejectWrite('putCommit');
    },

    async getEvents(cursor, limit) {
      const storage = getDefaultStorage();
      return storage?.getEvents(cursor, limit) || { events: [], cursor: 0 };
    },

    async putEvent() {
      rejectWrite('putEvent');
    },

    async getBlob(cid) {
      const storage = getDefaultStorage();
      return storage?.getBlob(cid) || null;
    },

    async putBlob() {
      rejectWrite('putBlob');
    },

    async deleteBlob() {
      rejectWrite('deleteBlob');
    },

    async listBlobs(cursor, limit) {
      const storage = getDefaultStorage();
      return storage?.listBlobs(cursor, limit) || { cids: [], cursor: null };
    },

    async getOrphanedBlobs() {
      return [];
    },

    async linkBlobToRecord() {
      rejectWrite('linkBlobToRecord');
    },

    async unlinkBlobsFromRecord() {
      rejectWrite('unlinkBlobsFromRecord');
    },

    async getDid() {
      const storage = getDefaultStorage();
      return storage?.getDid() || null;
    },

    async setDid() {
      rejectWrite('setDid');
    },

    async getHandle() {
      const storage = getDefaultStorage();
      return storage?.getHandle() || null;
    },

    async setHandle() {
      rejectWrite('setHandle');
    },

    async getPrivateKey() {
      return null; // No private key in read-only mode
    },

    async setPrivateKey() {
      rejectWrite('setPrivateKey');
    },

    async getPreferences() {
      const storage = getDefaultStorage();
      return storage?.getPreferences() || [];
    },

    async setPreferences() {
      rejectWrite('setPreferences');
    },
  };
}
309
+
310
+
/**
 * Create a null blob storage that returns empty results.
 * Used when no blobs directory is configured: reads resolve to null,
 * writes and deletes are rejected.
 * @returns {import('@pds/core/ports').BlobPort}
 */
function createNullBlobStorage() {
  const nullBlobs = {};
  nullBlobs.put = async () => {
    throw new Error('Read-only: put not allowed');
  };
  nullBlobs.get = async () => null;
  nullBlobs.delete = async () => {
    throw new Error('Read-only: delete not allowed');
  };
  return nullBlobs;
}
327
+
328
+
export { loadRepositoryFromCar } from '@pds/core';
329
+
export { buildRepositoryCar } from './build-car.js';
+15
pnpm-lock.yaml
+15
pnpm-lock.yaml
···
148
148
specifier: ^8.19.0
149
149
version: 8.19.0
150
150
151
+
packages/readonly:
152
+
dependencies:
153
+
'@pds/blobs-fs':
154
+
specifier: workspace:*
155
+
version: link:../blobs-fs
156
+
'@pds/core':
157
+
specifier: workspace:*
158
+
version: link:../core
159
+
'@pds/storage-sqlite':
160
+
specifier: workspace:*
161
+
version: link:../storage-sqlite
162
+
better-sqlite3:
163
+
specifier: '>=9.0.0'
164
+
version: 12.6.0
165
+
151
166
packages/storage-sqlite:
152
167
dependencies:
153
168
better-sqlite3:
+237
test/build-car.test.js
+237
test/build-car.test.js
···
1
+
// @pds/readonly/build-car tests
2
+
import { describe, it, expect } from 'vitest';
3
+
import { buildRepositoryCar } from '../packages/readonly/src/build-car.js';
4
+
import { parseCarFile } from '../packages/core/src/car.js';
5
+
import { loadRepositoryFromCar } from '../packages/core/src/loader.js';
6
+
import { cborDecode } from '../packages/core/src/repo.js';
7
+
import { generateKeyPair } from '../packages/core/src/crypto.js';
8
+
9
+
// Minimal in-memory ActorStorage implementation for exercising CAR loads:
// backs blocks/records/commits with Maps and an array, no persistence.
function createMockStorage() {
  const blocks = new Map();
  const records = new Map();
  const commits = [];
  const metadata = { did: null };

  return {
    // -- block store --
    getBlock: async (cid) => blocks.get(cid) || null,
    putBlock: async (cid, data) => {
      blocks.set(cid, data);
    },

    // -- records --
    getRecord: async (uri) => {
      const rec = records.get(uri);
      return rec || null;
    },
    putRecord: async (uri, cid, collection, rkey, value) => {
      records.set(uri, { cid, value });
    },
    listRecords: async () => ({ records: [], cursor: null }),
    listAllRecords: async () => [],
    deleteRecord: async () => {},

    // -- commits / events --
    getLatestCommit: async () =>
      commits.length > 0 ? commits[commits.length - 1] : null,
    putCommit: async (seq, cid, rev, prev) => {
      commits.push({ seq, cid, rev, prev });
    },
    getEvents: async () => ({ events: [], cursor: 0 }),
    putEvent: async () => {},

    // -- blobs --
    getBlob: async () => null,
    putBlob: async () => {},
    deleteBlob: async () => {},
    listBlobs: async () => ({ cids: [], cursor: null }),
    getOrphanedBlobs: async () => [],
    linkBlobToRecord: async () => {},
    unlinkBlobsFromRecord: async () => {},

    // -- identity / account metadata --
    getDid: async () => metadata.did,
    setDid: async (did) => {
      metadata.did = did;
    },
    getHandle: async () => null,
    setHandle: async () => {},
    getPrivateKey: async () => null,
    setPrivateKey: async () => {},
    getPreferences: async () => [],
    setPreferences: async () => {},

    // test-only inspection hooks
    _getRecords: () => records,
    _getCommits: () => commits,
  };
}
56
+
57
+
describe('buildRepositoryCar', () => {
  // Fixture helper: a minimal feed post record.
  const post = (text, createdAt) => ({
    $type: 'app.bsky.feed.post',
    text,
    createdAt,
  });

  it('should build a valid CAR with a single record', async () => {
    const { privateKey } = await generateKeyPair();
    const did = 'did:plc:testbuild123';

    const { carBytes, commitCid, rev } = await buildRepositoryCar({
      did,
      privateKey,
      records: [
        {
          collection: 'app.bsky.feed.post',
          rkey: '3abc123',
          record: post('Hello World!', '2024-01-01T00:00:00Z'),
        },
      ],
    });

    expect(carBytes).toBeInstanceOf(Uint8Array);
    expect(carBytes.length).toBeGreaterThan(0);
    expect(commitCid).toBeDefined();
    expect(rev).toBeDefined();
  });

  it('should build a valid CAR with multiple records', async () => {
    const { privateKey } = await generateKeyPair();

    const { carBytes, commitCid } = await buildRepositoryCar({
      did: 'did:plc:testmulti456',
      privateKey,
      records: [
        { collection: 'app.bsky.feed.post', rkey: '3post1', record: post('Post 1', '2024-01-01T00:00:00Z') },
        { collection: 'app.bsky.feed.post', rkey: '3post2', record: post('Post 2', '2024-01-02T00:00:00Z') },
        {
          collection: 'app.bsky.actor.profile',
          rkey: 'self',
          record: { $type: 'app.bsky.actor.profile', displayName: 'Test User' },
        },
      ],
    });

    expect(carBytes).toBeInstanceOf(Uint8Array);
    expect(commitCid).toBeDefined();
  });

  it('should produce a CAR that can be parsed', async () => {
    const { privateKey } = await generateKeyPair();
    const did = 'did:plc:testparse789';

    const { carBytes, commitCid } = await buildRepositoryCar({
      did,
      privateKey,
      records: [
        { collection: 'app.bsky.feed.post', rkey: '3test', record: post('Test', '2024-01-01T00:00:00Z') },
      ],
    });

    const parsed = parseCarFile(carBytes);

    expect(parsed.roots).toHaveLength(1);
    expect(parsed.roots[0]).toBe(commitCid);
    expect(parsed.blocks.size).toBeGreaterThan(0);

    // The root block must decode to a well-formed signed commit.
    const commitBytes = parsed.blocks.get(commitCid);
    expect(commitBytes).toBeDefined();

    const commit = cborDecode(commitBytes);
    expect(commit.did).toBe(did);
    expect(commit.version).toBe(3);
    expect(commit.sig).toBeDefined();
    expect(commit.sig).toBeInstanceOf(Uint8Array);
    expect(commit.sig.length).toBe(64); // ECDSA P-256 signature
  });

  it('should produce a CAR that can be loaded into storage', async () => {
    const { privateKey } = await generateKeyPair();
    const did = 'did:plc:testload101';

    const { carBytes } = await buildRepositoryCar({
      did,
      privateKey,
      records: [
        {
          collection: 'app.bsky.feed.post',
          rkey: '3loadtest',
          record: post('Load Test', '2024-01-01T00:00:00Z'),
        },
      ],
    });

    const storage = createMockStorage();
    const result = await loadRepositoryFromCar(carBytes, storage);

    expect(result.did).toBe(did);
    expect(result.recordCount).toBe(1);
    expect(await storage.getDid()).toBe(did);

    // The record must be retrievable by its AT-URI.
    const record = await storage.getRecord(`at://${did}/app.bsky.feed.post/3loadtest`);
    expect(record).not.toBeNull();
    expect(record?.cid).toBeDefined();

    // The commit must have been persisted.
    const commit = await storage.getLatestCommit();
    expect(commit).not.toBeNull();
    expect(commit?.cid).toBe(result.commitCid);
  });

  it('should handle empty records gracefully', async () => {
    const { privateKey } = await generateKeyPair();

    // Even with no records the builder emits a valid CAR holding just the commit.
    const { carBytes, commitCid } = await buildRepositoryCar({
      did: 'did:plc:testempty',
      privateKey,
      records: [],
    });

    expect(carBytes).toBeInstanceOf(Uint8Array);
    expect(commitCid).toBeDefined();

    const parsed = parseCarFile(carBytes);
    expect(parsed.roots[0]).toBe(commitCid);
  });

  it('should produce deterministic MST structure', async () => {
    const { privateKey } = await generateKeyPair();
    const did = 'did:plc:testdeterministic';

    const records = [
      { collection: 'c/b', rkey: 'z', record: { val: 3 } },
      { collection: 'a/a', rkey: 'x', record: { val: 1 } },
      { collection: 'b/a', rkey: 'y', record: { val: 2 } },
    ];

    const { carBytes: forwardCar } = await buildRepositoryCar({
      did,
      privateKey,
      records: [...records],
    });

    const { carBytes: reversedCar } = await buildRepositoryCar({
      did,
      privateKey,
      records: [...records].reverse(),
    });

    // Commits differ (fresh revisions), but the MST shape — and therefore the
    // total block count (records + MST nodes + commit) — must not depend on
    // input ordering.
    const parsedForward = parseCarFile(forwardCar);
    const parsedReversed = parseCarFile(reversedCar);
    expect(parsedForward.blocks.size).toBe(parsedReversed.blocks.size);
  });
});
+155
test/car.test.js
+155
test/car.test.js
···
1
+
// @pds/core/car tests - CAR file parser
2
+
import { describe, it, expect } from 'vitest';
3
+
import {
4
+
readVarint,
5
+
parseCarFile,
6
+
iterateCarBlocks,
7
+
getCarHeader,
8
+
} from '../packages/core/src/car.js';
9
+
import {
10
+
buildCarFile,
11
+
cborEncodeDagCbor,
12
+
createCid,
13
+
cidToString,
14
+
} from '../packages/core/src/repo.js';
15
+
16
+
describe('readVarint', () => {
  it('should decode single-byte varints', () => {
    // [expected value, encoding]
    const singles = [
      [0, [0x00]],
      [1, [0x01]],
      [127, [0x7f]],
    ];
    for (const [value, encoded] of singles) {
      expect(readVarint(new Uint8Array(encoded), 0)).toEqual([value, 1]);
    }
  });

  it('should decode multi-byte varints', () => {
    const doubles = [
      [128, [0x80, 0x01]],
      [255, [0xff, 0x01]],
      [300, [0xac, 0x02]],
    ];
    for (const [value, encoded] of doubles) {
      expect(readVarint(new Uint8Array(encoded), 0)).toEqual([value, 2]);
    }
  });

  it('should decode varint at offset', () => {
    // 300 (0xac 0x02) is embedded at offset 2 inside surrounding bytes.
    const bytes = new Uint8Array([0xff, 0x7f, 0xac, 0x02, 0x00]);
    expect(readVarint(bytes, 2)).toEqual([300, 4]);
  });

  it('should throw on unterminated varint', () => {
    // Continuation bit set with no following byte.
    expect(() => readVarint(new Uint8Array([0x80]), 0)).toThrow(
      'Unexpected end of varint',
    );
  });
});

describe('parseCarFile', () => {
  // Encode a value as DAG-CBOR and derive its CID string.
  const encodeBlock = async (value) => {
    const data = cborEncodeDagCbor(value);
    return { cid: cidToString(await createCid(data)), data };
  };

  it('should roundtrip with buildCarFile - single block', async () => {
    const { cid, data } = await encodeBlock({ $type: 'app.bsky.feed.post', text: 'Hello World' });

    const parsed = parseCarFile(buildCarFile(cid, [{ cid, data }]));

    expect(parsed.roots).toHaveLength(1);
    expect(parsed.roots[0]).toBe(cid);
    expect(parsed.blocks.size).toBe(1);
    expect(parsed.blocks.has(cid)).toBe(true);
    expect(parsed.blocks.get(cid)).toEqual(data);
  });

  it('should roundtrip with buildCarFile - multiple blocks', async () => {
    const entries = await Promise.all([
      encodeBlock({ text: 'Block 1' }),
      encodeBlock({ text: 'Block 2' }),
      encodeBlock({ text: 'Block 3' }),
    ]);

    // The first block doubles as the root.
    const car = buildCarFile(entries[0].cid, entries);
    const parsed = parseCarFile(car);

    expect(parsed.roots).toHaveLength(1);
    expect(parsed.roots[0]).toBe(entries[0].cid);
    expect(parsed.blocks.size).toBe(3);
    for (const { cid, data } of entries) {
      expect(parsed.blocks.get(cid)).toEqual(data);
    }
  });

  it('should throw on invalid CAR version', () => {
    // Hand-assemble a CAR whose header claims version 2.
    const badHeader = cborEncodeDagCbor({ version: 2, roots: [] });
    const car = new Uint8Array(1 + badHeader.length);
    car[0] = badHeader.length; // single-byte varint length prefix
    car.set(badHeader, 1);

    expect(() => parseCarFile(car)).toThrow('Unsupported CAR version: 2');
  });
});

describe('iterateCarBlocks', () => {
  it('should iterate blocks in order', async () => {
    const first = cborEncodeDagCbor({ index: 1 });
    const firstCid = cidToString(await createCid(first));

    const second = cborEncodeDagCbor({ index: 2 });
    const secondCid = cidToString(await createCid(second));

    const car = buildCarFile(firstCid, [
      { cid: firstCid, data: first },
      { cid: secondCid, data: second },
    ]);

    const seen = [...iterateCarBlocks(car)];

    expect(seen).toHaveLength(2);
    expect(seen[0].cid).toBe(firstCid);
    expect(seen[0].data).toEqual(first);
    expect(seen[1].cid).toBe(secondCid);
    expect(seen[1].data).toEqual(second);
  });
});

describe('getCarHeader', () => {
  it('should extract header without parsing blocks', async () => {
    // A large payload block — header extraction should not need to decode it.
    const payload = cborEncodeDagCbor({ data: 'x'.repeat(10000) });
    const payloadCid = cidToString(await createCid(payload));

    const car = buildCarFile(payloadCid, [{ cid: payloadCid, data: payload }]);

    const header = getCarHeader(car);

    expect(header.version).toBe(1);
    expect(header.roots).toHaveLength(1);
    expect(header.roots[0]).toBe(payloadCid);
  });
});
+317
test/loader.test.js
+317
test/loader.test.js
···
1
+
// @pds/core/loader tests - Repository loader from CAR files
2
+
import { describe, it, expect, beforeEach } from 'vitest';
3
+
import {
4
+
loadRepositoryFromCar,
5
+
getCarRepoInfo,
6
+
validateCarFile,
7
+
} from '../packages/core/src/loader.js';
8
+
import {
9
+
buildCarFile,
10
+
CID,
11
+
cborEncodeDagCbor,
12
+
createCid,
13
+
cidToString,
14
+
cidToBytes,
15
+
createTid,
16
+
} from '../packages/core/src/repo.js';
17
+
import { buildMst } from '../packages/core/src/mst.js';
18
+
19
+
// In-memory storage for tests
20
+
// In-memory storage double implementing the full actor-storage surface.
// Blocks, records, commits, and metadata are tracked; blob and event
// operations are inert stubs.
function createMockStorage() {
  /** @type {Map<string, Uint8Array>} cid -> block bytes */
  const blockStore = new Map();
  /** @type {Map<string, {cid: string, value: Uint8Array}>} uri -> record */
  const recordStore = new Map();
  /** @type {Array<{seq: number, cid: string, rev: string, prev: string|null}>} */
  const commitLog = [];
  /** @type {{did: string|null, handle: string|null, privateKey: Uint8Array|null}} */
  const meta = { did: null, handle: null, privateKey: null };

  return {
    // --- blocks ---
    async getBlock(cid) {
      return blockStore.get(cid) ?? null;
    },
    async putBlock(cid, data) {
      blockStore.set(cid, data);
    },

    // --- records ---
    async getRecord(uri) {
      return recordStore.get(uri) ?? null;
    },
    async putRecord(uri, cid, collection, rkey, value) {
      recordStore.set(uri, { cid, value });
    },
    async listRecords(collection, cursor, limit) {
      const matching = [...recordStore.entries()]
        .filter(([uri]) => uri.includes(`/${collection}/`))
        .map(([uri, rec]) => ({
          uri,
          cid: rec.cid,
          value: rec.value,
          rkey: uri.split('/').at(-1),
        }));
      return { records: matching.slice(0, limit), cursor: null };
    },
    async listAllRecords() {
      return [...recordStore.entries()].map(([uri, rec]) => {
        const [, collection, rkey] = uri.replace('at://', '').split('/');
        return { key: `${collection}/${rkey}`, cid: rec.cid };
      });
    },
    async deleteRecord(uri) {
      recordStore.delete(uri);
    },

    // --- commits ---
    async getLatestCommit() {
      return commitLog.at(-1) ?? null;
    },
    async putCommit(seq, cid, rev, prev) {
      commitLog.push({ seq, cid, rev, prev });
    },

    // --- metadata ---
    async getDid() {
      return meta.did;
    },
    async setDid(did) {
      meta.did = did;
    },
    async getHandle() {
      return meta.handle;
    },
    async setHandle(handle) {
      meta.handle = handle;
    },
    async getPrivateKey() {
      return meta.privateKey;
    },
    async setPrivateKey(key) {
      meta.privateKey = key;
    },
    async getPreferences() {
      return [];
    },
    async setPreferences() {},

    // --- events (inert) ---
    async getEvents() {
      return { events: [], cursor: 0 };
    },
    async putEvent() {},

    // --- blobs (inert) ---
    async getBlob() {
      return null;
    },
    async putBlob() {},
    async deleteBlob() {},
    async listBlobs() {
      return { cids: [], cursor: null };
    },
    async getOrphanedBlobs() {
      return [];
    },
    async linkBlobToRecord() {},
    async unlinkBlobsFromRecord() {},

    // --- test-only inspection helpers ---
    _getBlockCount() {
      return blockStore.size;
    },
    _getRecordCount() {
      return recordStore.size;
    },
    _getCommits() {
      return commitLog;
    },
  };
}
135
+
136
+
/**
 * Build a test CAR file containing the given records under a single
 * commit. The commit carries a zeroed 64-byte signature placeholder —
 * loader tests do not verify signatures.
 * @param {string} did - Repository DID embedded in the commit.
 * @param {Array<{collection: string, rkey: string, record: object}>} recordsToInclude
 * @returns {Promise<Uint8Array>} Serialized CAR v1 bytes rooted at the commit.
 */
async function buildTestCar(did, recordsToInclude) {
  /** @type {Array<{cid: string, data: Uint8Array}>} */
  const blocks = [];

  // Encode records and collect MST entries keyed by "collection/rkey".
  // (The previous version also mirrored blocks into a Map that was never
  // read; that dead bookkeeping has been removed.)
  /** @type {Array<{key: string, cid: string}>} */
  const mstEntries = [];

  for (const { collection, rkey, record } of recordsToInclude) {
    const recordBytes = cborEncodeDagCbor(record);
    const recordCid = cidToString(await createCid(recordBytes));
    blocks.push({ cid: recordCid, data: recordBytes });
    mstEntries.push({ key: `${collection}/${rkey}`, cid: recordCid });
  }

  // MST construction expects entries in lexicographic key order.
  mstEntries.sort((a, b) => a.key.localeCompare(b.key));

  // Build the MST, capturing every node block emitted along the way.
  const mstRoot = await buildMst(mstEntries, async (cid, data) => {
    blocks.push({ cid, data });
  });

  // Version-3 commit; `data` is null for an empty repository.
  const rev = createTid();
  const commit = {
    did,
    version: 3,
    rev,
    prev: null,
    data: mstRoot ? new CID(cidToBytes(mstRoot)) : null,
  };

  // Attach the placeholder signature, encode, and append as the root block.
  const signedCommit = { ...commit, sig: new Uint8Array(64) };
  const commitBytes = cborEncodeDagCbor(signedCommit);
  const commitCid = cidToString(await createCid(commitBytes));
  blocks.push({ cid: commitCid, data: commitBytes });

  return buildCarFile(commitCid, blocks);
}
187
+
188
+
describe('loadRepositoryFromCar', () => {
  const POST = 'app.bsky.feed.post';

  let storage;

  beforeEach(() => {
    storage = createMockStorage();
  });

  it('should load empty repository', async () => {
    const did = 'did:plc:test123';

    const result = await loadRepositoryFromCar(await buildTestCar(did, []), storage);

    expect(result.did).toBe(did);
    expect(result.recordCount).toBe(0);
    expect(result.blockCount).toBeGreaterThan(0);
    expect(await storage.getDid()).toBe(did);
  });

  it('should load repository with single record', async () => {
    const did = 'did:plc:test123';
    const car = await buildTestCar(did, [
      {
        collection: POST,
        rkey: '3abc123',
        record: { $type: POST, text: 'Hello World', createdAt: '2024-01-01T00:00:00Z' },
      },
    ]);

    const result = await loadRepositoryFromCar(car, storage);

    expect(result.did).toBe(did);
    expect(result.recordCount).toBe(1);

    const stored = await storage.getRecord(`at://${did}/${POST}/3abc123`);
    expect(stored).not.toBeNull();
    expect(stored?.cid).toBeDefined();
  });

  it('should load repository with multiple records', async () => {
    const did = 'did:plc:test456';
    const car = await buildTestCar(did, [
      {
        collection: POST,
        rkey: '3post1',
        record: { $type: POST, text: 'Post 1', createdAt: '2024-01-01T00:00:00Z' },
      },
      {
        collection: POST,
        rkey: '3post2',
        record: { $type: POST, text: 'Post 2', createdAt: '2024-01-02T00:00:00Z' },
      },
      {
        collection: 'app.bsky.actor.profile',
        rkey: 'self',
        record: { $type: 'app.bsky.actor.profile', displayName: 'Test User' },
      },
    ]);

    const result = await loadRepositoryFromCar(car, storage);

    expect(result.did).toBe(did);
    expect(result.recordCount).toBe(3);

    // Every record must resolve by its AT-URI.
    const paths = [`${POST}/3post1`, `${POST}/3post2`, 'app.bsky.actor.profile/self'];
    for (const path of paths) {
      expect(await storage.getRecord(`at://${did}/${path}`)).not.toBeNull();
    }
  });

  it('should create commit in storage', async () => {
    const did = 'did:plc:test789';
    const car = await buildTestCar(did, [
      {
        collection: POST,
        rkey: '3test',
        record: { $type: POST, text: 'Test', createdAt: '2024-01-01T00:00:00Z' },
      },
    ]);

    const result = await loadRepositoryFromCar(car, storage);
    const commit = await storage.getLatestCommit();

    expect(commit).not.toBeNull();
    expect(commit?.cid).toBe(result.commitCid);
    expect(commit?.rev).toBe(result.rev);
    expect(commit?.seq).toBe(1);
  });
});

describe('getCarRepoInfo', () => {
  it('should extract DID and commit info', async () => {
    const did = 'did:plc:infotest';
    const car = await buildTestCar(did, [
      {
        collection: 'app.bsky.feed.post',
        rkey: '3test',
        record: { $type: 'app.bsky.feed.post', text: 'Test', createdAt: '2024-01-01T00:00:00Z' },
      },
    ]);

    const info = getCarRepoInfo(car);

    expect(info.did).toBe(did);
    expect(info.commitCid).toBeDefined();
    expect(info.rev).toBeDefined();
  });
});

describe('validateCarFile', () => {
  it('should validate correct CAR file', async () => {
    const did = 'did:plc:validtest';

    const result = validateCarFile(await buildTestCar(did, []));

    expect(result.valid).toBe(true);
    expect(result.did).toBe(did);
    expect(result.error).toBeUndefined();
  });

  it('should reject malformed CAR', () => {
    const result = validateCarFile(new Uint8Array([0xff, 0xff, 0xff]));

    expect(result.valid).toBe(false);
    expect(result.error).toBeDefined();
  });
});
+317
test/readonly.test.js
+317
test/readonly.test.js
···
1
+
// Read-only mode tests
2
+
import { describe, it, expect, beforeEach } from 'vitest';
3
+
import { PersonalDataServer } from '../packages/core/src/pds.js';
4
+
5
+
// Create minimal mock storage
6
+
// Minimal actor-storage stub: fixed identity, one canned commit,
// and every mutation a no-op — just enough surface for routing tests.
function createMockStorage() {
  return {
    // Identity / metadata
    async getDid() { return 'did:plc:test123'; },
    async setDid() {},
    async getHandle() { return 'test.handle'; },
    async setHandle() {},
    async getPrivateKey() { return null; },
    async setPrivateKey() {},
    async getPreferences() { return []; },
    async setPreferences() {},

    // Repository
    async getRecord() { return null; },
    async putRecord() {},
    async listRecords() { return { records: [], cursor: null }; },
    async listAllRecords() { return []; },
    async deleteRecord() {},
    async getBlock() { return null; },
    async putBlock() {},
    async getLatestCommit() { return { seq: 1, cid: 'bafytest', rev: '3abc' }; },
    async putCommit() {},

    // Events
    async getEvents() { return { events: [], cursor: 0 }; },
    async putEvent() {},

    // Blobs
    async getBlob() { return null; },
    async putBlob() {},
    async deleteBlob() {},
    async listBlobs() { return { cids: [], cursor: null }; },
    async getOrphanedBlobs() { return []; },
    async linkBlobToRecord() {},
    async unlinkBlobsFromRecord() {},
  };
}
36
+
37
+
// Shared-storage stub: no actors, no OAuth state; DPoP JTI checks always pass.
function createMockSharedStorage() {
  return {
    // Actors
    async getActor() { return null; },
    async resolveHandle() { return null; },
    async putActor() {},
    async deleteActor() {},

    // OAuth requests and tokens
    async getOAuthRequest() { return null; },
    async putOAuthRequest() {},
    async deleteOAuthRequest() {},
    async getOAuthToken() { return null; },
    async putOAuthToken() {},
    async deleteOAuthToken() {},
    async listOAuthTokensByDid() { return []; },

    // DPoP replay protection
    async checkAndStoreDpopJti() { return true; },
    async cleanupExpiredDpopJtis() {},
  };
}
54
+
55
+
// Empty blob-store stub: nothing stored, nothing ever found.
function createMockBlobs() {
  return {
    async put() {},
    async get() { return null; },
    async delete() {},
    async has() { return false; },
    async list() { return { cids: [], cursor: null }; },
  };
}
64
+
65
+
describe('Read-only mode', () => {
  /** @type {PersonalDataServer} */
  let pds;

  const ORIGIN = 'https://pds.example.com';
  const JSON_CT = { 'Content-Type': 'application/json' };
  const AUTH_JSON = { 'Content-Type': 'application/json', 'Authorization': 'Bearer test-token' };
  const FORM_CT = { 'Content-Type': 'application/x-www-form-urlencoded' };

  // Build a POST request against the test origin.
  const post = (path, headers, body) =>
    new Request(`${ORIGIN}${path}`, { method: 'POST', headers, body });

  // Dispatch a request and assert the standard read-only 401 rejection;
  // returns the decoded body for additional per-test assertions.
  const expectRejected = async (request) => {
    const response = await pds.fetch(request);
    expect(response.status).toBe(401);
    const body = await response.json();
    expect(body.error).toBe('AuthenticationRequired');
    return body;
  };

  beforeEach(() => {
    pds = new PersonalDataServer({
      actorStorage: createMockStorage(),
      sharedStorage: createMockSharedStorage(),
      blobs: createMockBlobs(),
      jwtSecret: 'test-secret',
      readOnly: true,
    });
  });

  it('should allow GET requests (describeServer)', async () => {
    const response = await pds.fetch(
      new Request(`${ORIGIN}/xrpc/com.atproto.server.describeServer`),
    );
    expect(response.status).toBe(200);
  });

  it('should allow GET requests (listRepos)', async () => {
    const response = await pds.fetch(
      new Request(`${ORIGIN}/xrpc/com.atproto.sync.listRepos`),
    );
    expect(response.status).toBe(200);
  });

  it('should reject createSession with 401', async () => {
    const body = await expectRejected(post(
      '/xrpc/com.atproto.server.createSession',
      JSON_CT,
      JSON.stringify({ identifier: 'test', password: 'pass' }),
    ));
    expect(body.message).toBe('This PDS is read-only');
  });

  it('should reject refreshSession with 401', async () => {
    await expectRejected(post(
      '/xrpc/com.atproto.server.refreshSession',
      { 'Authorization': 'Bearer test-token' },
      undefined,
    ));
  });

  it('should reject createRecord with 401', async () => {
    const body = await expectRejected(post(
      '/xrpc/com.atproto.repo.createRecord',
      AUTH_JSON,
      JSON.stringify({
        repo: 'did:plc:test',
        collection: 'app.bsky.feed.post',
        record: { text: 'test' },
      }),
    ));
    expect(body.message).toBe('This PDS is read-only');
  });

  it('should reject putRecord with 401', async () => {
    await expectRejected(post(
      '/xrpc/com.atproto.repo.putRecord',
      AUTH_JSON,
      JSON.stringify({
        repo: 'did:plc:test',
        collection: 'app.bsky.feed.post',
        rkey: '3abc',
        record: { text: 'test' },
      }),
    ));
  });

  it('should reject deleteRecord with 401', async () => {
    await expectRejected(post(
      '/xrpc/com.atproto.repo.deleteRecord',
      AUTH_JSON,
      JSON.stringify({
        repo: 'did:plc:test',
        collection: 'app.bsky.feed.post',
        rkey: '3abc',
      }),
    ));
  });

  it('should reject applyWrites with 401', async () => {
    await expectRejected(post(
      '/xrpc/com.atproto.repo.applyWrites',
      AUTH_JSON,
      JSON.stringify({ repo: 'did:plc:test', writes: [] }),
    ));
  });

  it('should reject uploadBlob with 401', async () => {
    await expectRejected(post(
      '/xrpc/com.atproto.repo.uploadBlob',
      { 'Content-Type': 'image/png', 'Authorization': 'Bearer test-token' },
      new Uint8Array([0x89, 0x50, 0x4e, 0x47]),
    ));
  });

  it('should reject putPreferences with 401', async () => {
    await expectRejected(post(
      '/xrpc/app.bsky.actor.putPreferences',
      AUTH_JSON,
      JSON.stringify({ preferences: [] }),
    ));
  });

  it('should reject /init with 401', async () => {
    await expectRejected(post(
      '/init',
      JSON_CT,
      JSON.stringify({ did: 'did:plc:test', privateKey: 'abc123' }),
    ));
  });

  it('should reject OAuth PAR with 401', async () => {
    await expectRejected(post(
      '/oauth/par',
      FORM_CT,
      'client_id=test&redirect_uri=http://localhost',
    ));
  });

  it('should reject OAuth token with 401', async () => {
    await expectRejected(post(
      '/oauth/token',
      FORM_CT,
      'grant_type=authorization_code&code=test',
    ));
  });

  it('should reject OAuth revoke with 401', async () => {
    await expectRejected(post('/oauth/revoke', FORM_CT, 'token=test-token'));
  });
});

describe('Normal mode (readOnly=false)', () => {
  /** @type {PersonalDataServer} */
  let pds;

  beforeEach(() => {
    pds = new PersonalDataServer({
      actorStorage: createMockStorage(),
      sharedStorage: createMockSharedStorage(),
      blobs: createMockBlobs(),
      jwtSecret: 'test-secret',
      readOnly: false,
    });
  });

  it('should allow createSession (fail for other reasons, not read-only)', async () => {
    const response = await pds.fetch(
      new Request('https://pds.example.com/xrpc/com.atproto.server.createSession', {
        method: 'POST',
        headers: { 'Content-Type': 'application/json' },
        body: JSON.stringify({ identifier: 'test', password: 'pass' }),
      }),
    );

    // The call may fail on credentials/JWT setup, but never with the
    // read-only message.
    const body = await response.json();
    expect(body.message).not.toBe('This PDS is read-only');
  });
});

describe('Default mode (readOnly not specified)', () => {
  it('should default to non-read-only mode', async () => {
    const pds = new PersonalDataServer({
      actorStorage: createMockStorage(),
      sharedStorage: createMockSharedStorage(),
      blobs: createMockBlobs(),
      jwtSecret: 'test-secret',
      // readOnly intentionally omitted
    });

    const response = await pds.fetch(
      new Request('https://pds.example.com/xrpc/com.atproto.server.createSession', {
        method: 'POST',
        headers: { 'Content-Type': 'application/json' },
        body: JSON.stringify({ identifier: 'test', password: 'pass' }),
      }),
    );

    const body = await response.json();
    expect(body.message).not.toBe('This PDS is read-only');
  });
});
History
1 round
1 comment
ngerakines.me
submitted
#0
1 commit
expand
collapse
feature: readonly pds
expand 1 comment
pull request successfully merged
looks great! i think we can probably have a readonly entry in the cloudflare package then you dont have to manually define the worker. i can shuffle stuff around later