pds.js Refactor Implementation Plan#
For Claude: REQUIRED SUB-SKILL: Use superpowers:executing-plans to implement this plan task-by-task.
Goal: Improve pds.js maintainability through consolidated CBOR encoding, JSDoc documentation, route table pattern, and clarifying comments.
Architecture: Single-file refactor preserving current dependency order. Extract shared encodeHead helper for CBOR encoders. Replace PersonalDataServer.fetch if/else chain with declarative route table. Add JSDoc to exported functions and "why" comments to protocol-specific logic.
Tech Stack: JavaScript (ES modules), Cloudflare Workers, JSDoc
Task 1: Add CBOR Constants#
Files:
- Modify: `src/pds.js:1-12`
Step 1: Write test for constants usage
No new test needed — existing CBOR tests will verify constants work correctly.
Step 2: Add constants section at top of file
Insert before the CID wrapper class:
// === CONSTANTS ===
// CBOR primitive markers (RFC 8949)
const CBOR_FALSE = 0xf4
const CBOR_TRUE = 0xf5
const CBOR_NULL = 0xf6
// DAG-CBOR CID link tag
const CBOR_TAG_CID = 42
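These markers are single bytes, so the change is directly observable at the output (a quick sketch; assumes `cborEncode` returns a `Uint8Array`, as its JSDoc in Task 3 states):

```js
cborEncode(null)   // Uint8Array [ 0xf6 ]  (CBOR_NULL)
cborEncode(true)   // Uint8Array [ 0xf5 ]  (CBOR_TRUE)
cborEncode(false)  // Uint8Array [ 0xf4 ]  (CBOR_FALSE)
```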
Step 3: Update cborEncode to use constants
Replace in cborEncode function:
- `parts.push(0xf6)` → `parts.push(CBOR_NULL)`
- `parts.push(0xf5)` → `parts.push(CBOR_TRUE)`
- `parts.push(0xf4)` → `parts.push(CBOR_FALSE)`
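After the swap, the relevant branch of `cborEncode` should read roughly as follows (illustrative sketch; the actual control flow in `src/pds.js` may differ):

```js
if (value === null) parts.push(CBOR_NULL)
else if (value === true) parts.push(CBOR_TRUE)
else if (value === false) parts.push(CBOR_FALSE)
```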
Step 4: Update cborEncodeDagCbor to use constants
Same replacements, plus:
- `parts.push(0xd8, 42)` → `parts.push(0xd8, CBOR_TAG_CID)`
Step 5: Update cborEncodeMstNode to use constants
Same replacements for null/true/false and tag 42.
Step 6: Run tests to verify
Run: npm test
Expected: All CBOR tests pass
Step 7: Commit
git add src/pds.js
git commit -m "refactor: extract CBOR constants for clarity"
Task 2: Extract Shared encodeHead Helper#
Files:
- Modify: `src/pds.js` (CBOR ENCODING section)
Step 1: Write test for large integer encoding
Already exists — test/pds.test.js has "encodes large integers >= 2^31 without overflow"
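For reference, that assertion presumably round-trips a value above 2^31, along these lines (illustrative only; the framework and exact body in test/pds.test.js may differ):

```js
import assert from 'node:assert'
import { cborEncode, cborDecode } from '../src/pds.js'

// Values >= 2^31 overflow JS signed 32-bit bitwise ops (e.g. `length >> 24` goes negative)
const big = 2 ** 31 + 7
assert.strictEqual(cborDecode(cborEncode(big)), big)
```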
Step 2: Extract shared encodeHead function
Add after constants section, before cborEncode:
/**
* Encode CBOR type header (major type + length)
* @param {number[]} parts - Array to push bytes to
* @param {number} majorType - CBOR major type (0-7)
* @param {number} length - Value or length to encode
*/
function encodeHead(parts, majorType, length) {
const mt = majorType << 5
if (length < 24) {
parts.push(mt | length)
} else if (length < 256) {
parts.push(mt | 24, length)
} else if (length < 65536) {
parts.push(mt | 25, length >> 8, length & 0xff)
} else if (length < 4294967296) {
// Use Math.floor instead of bitshift to avoid 32-bit signed integer overflow
parts.push(mt | 26,
Math.floor(length / 0x1000000) & 0xff,
Math.floor(length / 0x10000) & 0xff,
Math.floor(length / 0x100) & 0xff,
length & 0xff)
}
}
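The head layout can be spot-checked by hand against RFC 8949 (byte values follow directly from the shifts above):

```js
const parts = []
encodeHead(parts, 3, 5)    // text string, len 5: one head byte 0x65 (0x60 | 5)
encodeHead(parts, 3, 70)   // len 70 >= 24: 0x78 (0x60 | 24) plus length byte 70
encodeHead(parts, 0, 500)  // uint 500: 0x19 (0x00 | 25) plus 0x01, 0xf4
// parts is now [0x65, 0x78, 70, 0x19, 0x01, 0xf4]
```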
Step 3: Update cborEncode to use shared helper
Remove the local encodeHead function. Replace calls:
- `encodeHead(3, bytes.length)` → `encodeHead(parts, 3, bytes.length)`
- Same pattern for all other calls
Step 4: Update cborEncodeDagCbor to use shared helper
Remove the local encodeHead function. Update all calls to pass parts as first argument.
Step 5: Update cborEncodeMstNode to use shared helper
Remove the local encodeHead function. Update all calls to pass parts as first argument.
Step 6: Run tests
Run: npm test
Expected: All tests pass
Step 7: Commit
git add src/pds.js
git commit -m "refactor: consolidate CBOR encodeHead into shared helper"
Task 3: Add JSDoc to Exported Functions#
Files:
- Modify: `src/pds.js`
Step 1: Add JSDoc to cborEncode
/**
* Encode a value as CBOR bytes (RFC 8949 deterministic encoding)
* @param {*} value - Value to encode (null, boolean, number, string, Uint8Array, array, or object)
* @returns {Uint8Array} CBOR-encoded bytes
*/
export function cborEncode(value) {
Step 2: Add JSDoc to cborDecode
/**
* Decode CBOR bytes to a JavaScript value
* @param {Uint8Array} bytes - CBOR-encoded bytes
* @returns {*} Decoded value
*/
export function cborDecode(bytes) {
Step 3: Add JSDoc to CID functions
/**
* Create a CIDv1 (dag-cbor + sha-256) from raw bytes
* @param {Uint8Array} bytes - Content to hash
* @returns {Promise<Uint8Array>} CID bytes (36 bytes: version + codec + multihash)
*/
export async function createCid(bytes) {
/**
* Convert CID bytes to base32lower string representation
* @param {Uint8Array} cid - CID bytes
* @returns {string} Base32lower-encoded CID with 'b' prefix
*/
export function cidToString(cid) {
/**
* Encode bytes as base32lower string
* @param {Uint8Array} bytes - Bytes to encode
* @returns {string} Base32lower-encoded string
*/
export function base32Encode(bytes) {
Step 4: Add JSDoc to TID function
/**
* Generate a timestamp-based ID (TID) for record keys
* Monotonic within a process, sortable by time
* @returns {string} 13-character base32-sort encoded TID
*/
export function createTid() {
Step 5: Add JSDoc to signing functions
/**
* Import a raw P-256 private key for signing
* @param {Uint8Array} privateKeyBytes - 32-byte raw private key
* @returns {Promise<CryptoKey>} Web Crypto key handle
*/
export async function importPrivateKey(privateKeyBytes) {
/**
* Sign data with ECDSA P-256, returning low-S normalized signature
* @param {CryptoKey} privateKey - Web Crypto key from importPrivateKey
* @param {Uint8Array} data - Data to sign
* @returns {Promise<Uint8Array>} 64-byte signature (r || s)
*/
export async function sign(privateKey, data) {
/**
* Generate a new P-256 key pair
* @returns {Promise<{privateKey: Uint8Array, publicKey: Uint8Array}>} 32-byte private key, 33-byte compressed public key
*/
export async function generateKeyPair() {
Step 6: Add JSDoc to utility functions
/**
* Convert bytes to hexadecimal string
* @param {Uint8Array} bytes - Bytes to convert
* @returns {string} Hex string
*/
export function bytesToHex(bytes) {
/**
* Convert hexadecimal string to bytes
* @param {string} hex - Hex string
* @returns {Uint8Array} Decoded bytes
*/
export function hexToBytes(hex) {
/**
* Get MST tree depth for a key based on leading zero bits in its SHA-256 hash
* @param {string} key - Record key (collection/rkey)
* @returns {Promise<number>} Tree depth (leading zero bits / 2)
*/
export async function getKeyDepth(key) {
/**
* Encode integer as unsigned varint
* @param {number} n - Non-negative integer
* @returns {Uint8Array} Varint-encoded bytes
*/
export function varint(n) {
/**
* Convert base32lower CID string to raw bytes
* @param {string} cidStr - CID string with 'b' prefix
* @returns {Uint8Array} CID bytes
*/
export function cidToBytes(cidStr) {
/**
* Decode base32lower string to bytes
* @param {string} str - Base32lower-encoded string
* @returns {Uint8Array} Decoded bytes
*/
export function base32Decode(str) {
/**
* Build a CAR (Content Addressable aRchive) file
* @param {string} rootCid - Root CID string
* @param {Array<{cid: string, data: Uint8Array}>} blocks - Blocks to include
* @returns {Uint8Array} CAR file bytes
*/
export function buildCarFile(rootCid, blocks) {
Step 7: Run tests
Run: npm test
Expected: All tests pass (JSDoc doesn't affect runtime)
Step 8: Commit
git add src/pds.js
git commit -m "docs: add JSDoc to exported functions"
Task 4: Add "Why" Comments to Protocol Logic#
Files:
- Modify: `src/pds.js`
Step 1: Add comment to DAG-CBOR key sorting
In cborEncodeDagCbor, before the keys.sort() call:
// DAG-CBOR: sort keys by length first, then lexicographically
// (differs from standard CBOR which sorts lexicographically only)
const keys = Object.keys(val).filter(k => val[k] !== undefined)
keys.sort((a, b) => {
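For reference, a comparator implementing that order looks like the sketch below (pds.js already has an equivalent body; this only makes the comment's claim concrete):

```js
keys.sort((a, b) => {
  // Length-first: shorter keys sort earlier
  if (a.length !== b.length) return a.length - b.length
  // Equal lengths: plain lexicographic comparison
  return a < b ? -1 : a > b ? 1 : 0
})
```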
Step 2: Add comment to MST depth calculation
In getKeyDepth, before the return:
// MST depth = floor(leading zero bits in SHA-256(key) / 2), i.e. 2 bits per level (fanout 4)
// This creates a probabilistic tree where ~75% of keys land at depth 0
// and each deeper level is ~4x rarer, giving O(log n) lookups
const depth = Math.floor(zeros / 2)
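If it helps future readers, the zero-counting step can be illustrated like this (a sketch; `getKeyDepth` computes `zeros` its own way, and `leadingZeroBits` is a hypothetical name):

```js
// Count leading zero bits of the SHA-256 digest
function leadingZeroBits(hash) {
  let zeros = 0
  for (const byte of hash) {
    if (byte === 0) {
      zeros += 8
      continue
    }
    zeros += Math.clz32(byte) - 24  // clz32 counts from bit 31; drop the 24 empty high bits
    break
  }
  return zeros
}
// e.g. a digest starting 0x1f (binary 0001 1111) has 3 leading zero bits -> depth floor(3/2) = 1
```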
Step 3: Add comment to low-S normalization
In sign function, before the if statement:
// Low-S normalization: Bitcoin/ATProto require S <= N/2 to prevent
// signature malleability (two valid signatures for same message)
if (sBigInt > P256_N_DIV_2) {
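The branch body is conventionally a flip across the curve order (sketch; `P256_N` is assumed here to be the full P-256 group order, defined alongside the existing `P256_N_DIV_2`):

```js
// Replace S with N - S so the signature lands in the lower half
sBigInt = P256_N - sBigInt
```

Both (r, s) and (r, N - s) verify against the same message, which is exactly the malleability the comment describes.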
Step 4: Add comment to CID tag encoding
In cborEncodeDagCbor, at the CID encoding:
} else if (val instanceof CID) {
// CID links in DAG-CBOR use tag 42 + 0x00 multibase prefix
// The 0x00 prefix indicates "identity" multibase (raw bytes)
parts.push(0xd8, CBOR_TAG_CID)
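For context, the bytes after the tag follow DAG-CBOR's rule of a byte string containing a 0x00 identity-multibase prefix plus the raw CID (sketch; `val.bytes` is an assumed field name on the CID wrapper class):

```js
// Tag 42 payload: byte string of length (1 + CID length)
encodeHead(parts, 2, val.bytes.length + 1)
parts.push(0x00, ...val.bytes)  // 0x00 identity prefix, then the raw CID bytes
```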
Step 5: Run tests
Run: npm test
Expected: All tests pass
Step 6: Commit
git add src/pds.js
git commit -m "docs: add 'why' comments to protocol-specific logic"
Task 5: Extract PersonalDataServer Route Table#
Files:
- Modify: `src/pds.js` (PERSONAL DATA SERVER section)
Step 1: Define route table before class
Add before export class PersonalDataServer:
/**
* Route handler function type
* @callback RouteHandler
* @param {PersonalDataServer} pds - PDS instance
* @param {Request} request - HTTP request
* @param {URL} url - Parsed URL
* @returns {Promise<Response>} HTTP response
*/
/**
* @typedef {Object} Route
* @property {string} [method] - Required HTTP method (default: any)
* @property {RouteHandler} handler - Handler function
*/
/** @type {Record<string, Route>} */
const pdsRoutes = {
'/.well-known/atproto-did': {
handler: (pds, req, url) => pds.handleAtprotoDid()
},
'/init': {
method: 'POST',
handler: (pds, req, url) => pds.handleInit(req)
},
'/status': {
handler: (pds, req, url) => pds.handleStatus()
},
'/reset-repo': {
handler: (pds, req, url) => pds.handleResetRepo()
},
'/forward-event': {
handler: (pds, req, url) => pds.handleForwardEvent(req)
},
'/register-did': {
handler: (pds, req, url) => pds.handleRegisterDid(req)
},
'/get-registered-dids': {
handler: (pds, req, url) => pds.handleGetRegisteredDids()
},
'/repo-info': {
handler: (pds, req, url) => pds.handleRepoInfo()
},
'/xrpc/com.atproto.server.describeServer': {
handler: (pds, req, url) => pds.handleDescribeServer(req)
},
'/xrpc/com.atproto.sync.listRepos': {
handler: (pds, req, url) => pds.handleListRepos()
},
'/xrpc/com.atproto.repo.createRecord': {
method: 'POST',
handler: (pds, req, url) => pds.handleCreateRecord(req)
},
'/xrpc/com.atproto.repo.getRecord': {
handler: (pds, req, url) => pds.handleGetRecord(url)
},
'/xrpc/com.atproto.sync.getLatestCommit': {
handler: (pds, req, url) => pds.handleGetLatestCommit()
},
'/xrpc/com.atproto.sync.getRepoStatus': {
handler: (pds, req, url) => pds.handleGetRepoStatus()
},
'/xrpc/com.atproto.sync.getRepo': {
handler: (pds, req, url) => pds.handleGetRepo()
},
'/xrpc/com.atproto.sync.subscribeRepos': {
handler: (pds, req, url) => pds.handleSubscribeRepos(req, url)
}
}
Step 2: Extract handleAtprotoDid method
Add to PersonalDataServer class:
async handleAtprotoDid() {
let did = await this.getDid()
if (!did) {
const registeredDids = await this.state.storage.get('registeredDids') || []
did = registeredDids[0]
}
if (!did) {
return new Response('User not found', { status: 404 })
}
return new Response(did, { headers: { 'Content-Type': 'text/plain' } })
}
Step 3: Extract handleInit method
async handleInit(request) {
const body = await request.json()
if (!body.did || !body.privateKey) {
return Response.json({ error: 'missing did or privateKey' }, { status: 400 })
}
await this.initIdentity(body.did, body.privateKey, body.handle || null)
return Response.json({ ok: true, did: body.did, handle: body.handle || null })
}
Step 4: Extract handleStatus method
async handleStatus() {
const did = await this.getDid()
return Response.json({ initialized: !!did, did: did || null })
}
Step 5: Extract handleResetRepo method
async handleResetRepo() {
this.sql.exec(`DELETE FROM blocks`)
this.sql.exec(`DELETE FROM records`)
this.sql.exec(`DELETE FROM commits`)
this.sql.exec(`DELETE FROM seq_events`)
await this.state.storage.delete('head')
await this.state.storage.delete('rev')
return Response.json({ ok: true, message: 'repo data cleared' })
}
Step 6: Extract handleForwardEvent method
async handleForwardEvent(request) {
const evt = await request.json()
const numSockets = [...this.state.getWebSockets()].length
console.log(`forward-event: received event seq=${evt.seq}, ${numSockets} connected sockets`)
this.broadcastEvent({
seq: evt.seq,
did: evt.did,
commit_cid: evt.commit_cid,
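// evt.evt arrives JSON-serialized, which turns a Uint8Array into { "0": b0, "1": b1, ... }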
evt: new Uint8Array(Object.values(evt.evt))
})
return Response.json({ ok: true, sockets: numSockets })
}
Step 7: Extract handleRegisterDid method
async handleRegisterDid(request) {
const body = await request.json()
const registeredDids = await this.state.storage.get('registeredDids') || []
if (!registeredDids.includes(body.did)) {
registeredDids.push(body.did)
await this.state.storage.put('registeredDids', registeredDids)
}
return Response.json({ ok: true })
}
Step 8: Extract handleGetRegisteredDids method
async handleGetRegisteredDids() {
const registeredDids = await this.state.storage.get('registeredDids') || []
return Response.json({ dids: registeredDids })
}
Step 9: Extract handleRepoInfo method
async handleRepoInfo() {
const head = await this.state.storage.get('head')
const rev = await this.state.storage.get('rev')
return Response.json({ head: head || null, rev: rev || null })
}
Step 10: Extract handleDescribeServer method
handleDescribeServer(request) {
const hostname = request.headers.get('x-hostname') || 'localhost'
return Response.json({
did: `did:web:${hostname}`,
availableUserDomains: [`.${hostname}`],
inviteCodeRequired: false,
phoneVerificationRequired: false,
links: {},
contact: {}
})
}
Step 11: Extract handleListRepos method
async handleListRepos() {
const registeredDids = await this.state.storage.get('registeredDids') || []
const did = await this.getDid()
const repos = did ? [{ did, head: null, rev: null }] :
registeredDids.map(d => ({ did: d, head: null, rev: null }))
return Response.json({ repos })
}
Step 12: Extract handleCreateRecord method
async handleCreateRecord(request) {
const body = await request.json()
if (!body.collection || !body.record) {
return Response.json({ error: 'missing collection or record' }, { status: 400 })
}
try {
const result = await this.createRecord(body.collection, body.record, body.rkey)
return Response.json(result)
} catch (err) {
return Response.json({ error: err.message }, { status: 500 })
}
}
Step 13: Extract handleGetRecord method
async handleGetRecord(url) {
const collection = url.searchParams.get('collection')
const rkey = url.searchParams.get('rkey')
if (!collection || !rkey) {
return Response.json({ error: 'missing collection or rkey' }, { status: 400 })
}
const did = await this.getDid()
const uri = `at://${did}/${collection}/${rkey}`
const rows = this.sql.exec(
`SELECT cid, value FROM records WHERE uri = ?`, uri
).toArray()
if (rows.length === 0) {
return Response.json({ error: 'record not found' }, { status: 404 })
}
const row = rows[0]
const value = cborDecode(new Uint8Array(row.value))
return Response.json({ uri, cid: row.cid, value })
}
Step 14: Extract handleGetLatestCommit method
handleGetLatestCommit() {
const commits = this.sql.exec(
`SELECT cid, rev FROM commits ORDER BY seq DESC LIMIT 1`
).toArray()
if (commits.length === 0) {
return Response.json({ error: 'RepoNotFound', message: 'repo not found' }, { status: 404 })
}
return Response.json({ cid: commits[0].cid, rev: commits[0].rev })
}
Step 15: Extract handleGetRepoStatus method
async handleGetRepoStatus() {
const did = await this.getDid()
const commits = this.sql.exec(
`SELECT cid, rev FROM commits ORDER BY seq DESC LIMIT 1`
).toArray()
if (commits.length === 0 || !did) {
return Response.json({ error: 'RepoNotFound', message: 'repo not found' }, { status: 404 })
}
return Response.json({ did, active: true, status: 'active', rev: commits[0].rev })
}
Step 16: Extract handleGetRepo method
handleGetRepo() {
const commits = this.sql.exec(
`SELECT cid FROM commits ORDER BY seq DESC LIMIT 1`
).toArray()
if (commits.length === 0) {
return Response.json({ error: 'repo not found' }, { status: 404 })
}
const blocks = this.sql.exec(`SELECT cid, data FROM blocks`).toArray()
const blocksForCar = blocks.map(b => ({
cid: b.cid,
data: new Uint8Array(b.data)
}))
const car = buildCarFile(commits[0].cid, blocksForCar)
return new Response(car, {
headers: { 'content-type': 'application/vnd.ipld.car' }
})
}
Step 17: Extract handleSubscribeRepos method
handleSubscribeRepos(request, url) {
const upgradeHeader = request.headers.get('Upgrade')
if (upgradeHeader !== 'websocket') {
return new Response('expected websocket', { status: 426 })
}
const { 0: client, 1: server } = new WebSocketPair()
this.state.acceptWebSocket(server)
const cursor = url.searchParams.get('cursor')
if (cursor) {
const events = this.sql.exec(
`SELECT * FROM seq_events WHERE seq > ? ORDER BY seq`,
parseInt(cursor)
).toArray()
for (const evt of events) {
server.send(this.formatEvent(evt))
}
}
return new Response(null, { status: 101, webSocket: client })
}
Step 18: Replace fetch method with router
async fetch(request) {
const url = new URL(request.url)
const route = pdsRoutes[url.pathname]
if (!route) {
return Response.json({ error: 'not found' }, { status: 404 })
}
if (route.method && request.method !== route.method) {
return Response.json({ error: 'method not allowed' }, { status: 405 })
}
return route.handler(this, request, url)
}
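A few dispatch traces as a sanity check (hypothetical requests; behavior follows directly from the code above):

```js
// GET  /status       -> handleStatus()       (no method restriction on the route)
// POST /init         -> handleInit(request)  (method matches 'POST')
// GET  /init         -> 405 method not allowed
// GET  /no-such-path -> 404 not found
```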
Step 19: Run tests
Run: npm test
Expected: All tests pass
Step 20: Commit
git add src/pds.js
git commit -m "refactor: extract PersonalDataServer route table"
Summary#
After completing all tasks, the file will have:
- Named constants for CBOR markers and CID tag
- Single shared `encodeHead` helper (no duplication)
- JSDoc on all 16 exported functions
- "Why" comments on 4 protocol-specific code sections
- Declarative route table with 16 focused handler methods
- Same dependency order, same single file