.wrangler/state/v3/do/atproto-pds-PersonalDataServer/d67d37442b3ca7789897149c593e43bcf1fd6a9933be25596eb0cbe4c6a0783e.sqlite
This is a binary file and will not be displayed.

.wrangler/state/v3/do/atproto-pds-PersonalDataServer/d67d37442b3ca7789897149c593e43bcf1fd6a9933be25596eb0cbe4c6a0783e.sqlite-shm
This is a binary file and will not be displayed.

.wrangler/state/v3/do/atproto-pds-PersonalDataServer/d67d37442b3ca7789897149c593e43bcf1fd6a9933be25596eb0cbe4c6a0783e.sqlite-wal
This is a binary file and will not be displayed.

.wrangler/tmp/bundle-MaCAbF/checked-fetch.js (+30)

const urls = new Set();

function checkURL(request, init) {
	const url =
		request instanceof URL
			? request
			: new URL(
					(typeof request === "string"
						? new Request(request, init)
						: request
					).url
				);
	if (url.port && url.port !== "443" && url.protocol === "https:") {
		if (!urls.has(url.toString())) {
			urls.add(url.toString());
			console.warn(
				`WARNING: known issue with \`fetch()\` requests to custom HTTPS ports in published Workers:\n` +
					` - ${url.toString()} - the custom port will be ignored when the Worker is published using the \`wrangler deploy\` command.\n`
			);
		}
	}
}

globalThis.fetch = new Proxy(globalThis.fetch, {
	apply(target, thisArg, argArray) {
		const [request, init] = argArray;
		checkURL(request, init);
		return Reflect.apply(target, thisArg, argArray);
	},
});

.wrangler/tmp/bundle-MaCAbF/middleware-insertion-facade.js (+11)

import worker, * as OTHER_EXPORTS from "/Users/chadmiller/code/pds-experiment/src/pds.js";
import * as __MIDDLEWARE_0__ from "/Users/chadmiller/.npm/_npx/32026684e21afda6/node_modules/wrangler/templates/middleware/middleware-ensure-req-body-drained.ts";
import * as __MIDDLEWARE_1__ from "/Users/chadmiller/.npm/_npx/32026684e21afda6/node_modules/wrangler/templates/middleware/middleware-miniflare3-json-error.ts";

export * from "/Users/chadmiller/code/pds-experiment/src/pds.js";
const MIDDLEWARE_TEST_INJECT = "__INJECT_FOR_TESTING_WRANGLER_MIDDLEWARE__";
export const __INTERNAL_WRANGLER_MIDDLEWARE__ = [

	__MIDDLEWARE_0__.default,__MIDDLEWARE_1__.default
]
export default worker;

.wrangler/tmp/bundle-MaCAbF/middleware-loader.entry.ts (+134)

// This loads all middlewares exposed on the middleware object and then starts
// the invocation chain. The big idea is that we can add these to the middleware
// export dynamically through wrangler, or we can potentially let users directly
// add them as a sort of "plugin" system.

import ENTRY, { __INTERNAL_WRANGLER_MIDDLEWARE__ } from "/Users/chadmiller/code/pds-experiment/.wrangler/tmp/bundle-MaCAbF/middleware-insertion-facade.js";
import { __facade_invoke__, __facade_register__, Dispatcher } from "/Users/chadmiller/.npm/_npx/32026684e21afda6/node_modules/wrangler/templates/middleware/common.ts";
import type { WorkerEntrypointConstructor } from "/Users/chadmiller/code/pds-experiment/.wrangler/tmp/bundle-MaCAbF/middleware-insertion-facade.js";

// Preserve all the exports from the worker
export * from "/Users/chadmiller/code/pds-experiment/.wrangler/tmp/bundle-MaCAbF/middleware-insertion-facade.js";

class __Facade_ScheduledController__ implements ScheduledController {
	readonly #noRetry: ScheduledController["noRetry"];

	constructor(
		readonly scheduledTime: number,
		readonly cron: string,
		noRetry: ScheduledController["noRetry"]
	) {
		this.#noRetry = noRetry;
	}

	noRetry() {
		if (!(this instanceof __Facade_ScheduledController__)) {
			throw new TypeError("Illegal invocation");
		}
		// Need to call native method immediately in case uncaught error thrown
		this.#noRetry();
	}
}

function wrapExportedHandler(worker: ExportedHandler): ExportedHandler {
	// If we don't have any middleware defined, just return the handler as is
	if (
		__INTERNAL_WRANGLER_MIDDLEWARE__ === undefined ||
		__INTERNAL_WRANGLER_MIDDLEWARE__.length === 0
	) {
		return worker;
	}
	// Otherwise, register all middleware once
	for (const middleware of __INTERNAL_WRANGLER_MIDDLEWARE__) {
		__facade_register__(middleware);
	}

	const fetchDispatcher: ExportedHandlerFetchHandler = function (
		request,
		env,
		ctx
	) {
		if (worker.fetch === undefined) {
			throw new Error("Handler does not export a fetch() function.");
		}
		return worker.fetch(request, env, ctx);
	};

	return {
		...worker,
		fetch(request, env, ctx) {
			const dispatcher: Dispatcher = function (type, init) {
				if (type === "scheduled" && worker.scheduled !== undefined) {
					const controller = new __Facade_ScheduledController__(
						Date.now(),
						init.cron ?? "",
						() => {}
					);
					return worker.scheduled(controller, env, ctx);
				}
			};
			return __facade_invoke__(request, env, ctx, dispatcher, fetchDispatcher);
		},
	};
}

function wrapWorkerEntrypoint(
	klass: WorkerEntrypointConstructor
): WorkerEntrypointConstructor {
	// If we don't have any middleware defined, just return the handler as is
	if (
		__INTERNAL_WRANGLER_MIDDLEWARE__ === undefined ||
		__INTERNAL_WRANGLER_MIDDLEWARE__.length === 0
	) {
		return klass;
	}
	// Otherwise, register all middleware once
	for (const middleware of __INTERNAL_WRANGLER_MIDDLEWARE__) {
		__facade_register__(middleware);
	}

	// `extend`ing `klass` here so other RPC methods remain callable
	return class extends klass {
		#fetchDispatcher: ExportedHandlerFetchHandler<Record<string, unknown>> = (
			request,
			env,
			ctx
		) => {
			this.env = env;
			this.ctx = ctx;
			if (super.fetch === undefined) {
				throw new Error("Entrypoint class does not define a fetch() function.");
			}
			return super.fetch(request);
		};

		#dispatcher: Dispatcher = (type, init) => {
			if (type === "scheduled" && super.scheduled !== undefined) {
				const controller = new __Facade_ScheduledController__(
					Date.now(),
					init.cron ?? "",
					() => {}
				);
				return super.scheduled(controller);
			}
		};

		fetch(request: Request<unknown, IncomingRequestCfProperties>) {
			return __facade_invoke__(
				request,
				this.env,
				this.ctx,
				this.#dispatcher,
				this.#fetchDispatcher
			);
		}
	};
}

let WRAPPED_ENTRY: ExportedHandler | WorkerEntrypointConstructor | undefined;
if (typeof ENTRY === "object") {
	WRAPPED_ENTRY = wrapExportedHandler(ENTRY);
} else if (typeof ENTRY === "function") {
	WRAPPED_ENTRY = wrapWorkerEntrypoint(ENTRY);
}
export default WRAPPED_ENTRY;

.wrangler/tmp/dev-qpR91k/pds.js (+230)

var __defProp = Object.defineProperty;
var __name = (target, value) => __defProp(target, "name", { value, configurable: true });

// .wrangler/tmp/bundle-MaCAbF/checked-fetch.js
var urls = /* @__PURE__ */ new Set();
function checkURL(request, init) {
  const url = request instanceof URL ? request : new URL(
    (typeof request === "string" ? new Request(request, init) : request).url
  );
  if (url.port && url.port !== "443" && url.protocol === "https:") {
    if (!urls.has(url.toString())) {
      urls.add(url.toString());
      console.warn(
        `WARNING: known issue with \`fetch()\` requests to custom HTTPS ports in published Workers:
 - ${url.toString()} - the custom port will be ignored when the Worker is published using the \`wrangler deploy\` command.
`
      );
    }
  }
}
__name(checkURL, "checkURL");
globalThis.fetch = new Proxy(globalThis.fetch, {
  apply(target, thisArg, argArray) {
    const [request, init] = argArray;
    checkURL(request, init);
    return Reflect.apply(target, thisArg, argArray);
  }
});

// src/pds.js
var PersonalDataServer = class {
  static {
    __name(this, "PersonalDataServer");
  }
  constructor(state, env) {
    this.state = state;
    this.sql = state.storage.sql;
  }
  async fetch(request) {
    return new Response("pds running", { status: 200 });
  }
};
var pds_default = {
  async fetch(request, env) {
    const url = new URL(request.url);
    const did = url.searchParams.get("did");
    if (!did) {
      return new Response("missing did param", { status: 400 });
    }
    const id = env.PDS.idFromName(did);
    const pds = env.PDS.get(id);
    return pds.fetch(request);
  }
};

// ../../.npm/_npx/32026684e21afda6/node_modules/wrangler/templates/middleware/middleware-ensure-req-body-drained.ts
var drainBody = /* @__PURE__ */ __name(async (request, env, _ctx, middlewareCtx) => {
  try {
    return await middlewareCtx.next(request, env);
  } finally {
    try {
      if (request.body !== null && !request.bodyUsed) {
        const reader = request.body.getReader();
        while (!(await reader.read()).done) {
        }
      }
    } catch (e) {
      console.error("Failed to drain the unused request body.", e);
    }
  }
}, "drainBody");
var middleware_ensure_req_body_drained_default = drainBody;

// ../../.npm/_npx/32026684e21afda6/node_modules/wrangler/templates/middleware/middleware-miniflare3-json-error.ts
function reduceError(e) {
  return {
    name: e?.name,
    message: e?.message ?? String(e),
    stack: e?.stack,
    cause: e?.cause === void 0 ? void 0 : reduceError(e.cause)
  };
}
__name(reduceError, "reduceError");
var jsonError = /* @__PURE__ */ __name(async (request, env, _ctx, middlewareCtx) => {
  try {
    return await middlewareCtx.next(request, env);
  } catch (e) {
    const error = reduceError(e);
    return Response.json(error, {
      status: 500,
      headers: { "MF-Experimental-Error-Stack": "true" }
    });
  }
}, "jsonError");
var middleware_miniflare3_json_error_default = jsonError;

// .wrangler/tmp/bundle-MaCAbF/middleware-insertion-facade.js
var __INTERNAL_WRANGLER_MIDDLEWARE__ = [
  middleware_ensure_req_body_drained_default,
  middleware_miniflare3_json_error_default
];
var middleware_insertion_facade_default = pds_default;

// ../../.npm/_npx/32026684e21afda6/node_modules/wrangler/templates/middleware/common.ts
var __facade_middleware__ = [];
function __facade_register__(...args) {
  __facade_middleware__.push(...args.flat());
}
__name(__facade_register__, "__facade_register__");
function __facade_invokeChain__(request, env, ctx, dispatch, middlewareChain) {
  const [head, ...tail] = middlewareChain;
  const middlewareCtx = {
    dispatch,
    next(newRequest, newEnv) {
      return __facade_invokeChain__(newRequest, newEnv, ctx, dispatch, tail);
    }
  };
  return head(request, env, ctx, middlewareCtx);
}
__name(__facade_invokeChain__, "__facade_invokeChain__");
function __facade_invoke__(request, env, ctx, dispatch, finalMiddleware) {
  return __facade_invokeChain__(request, env, ctx, dispatch, [
    ...__facade_middleware__,
    finalMiddleware
  ]);
}
__name(__facade_invoke__, "__facade_invoke__");

// .wrangler/tmp/bundle-MaCAbF/middleware-loader.entry.ts
var __Facade_ScheduledController__ = class ___Facade_ScheduledController__ {
  constructor(scheduledTime, cron, noRetry) {
    this.scheduledTime = scheduledTime;
    this.cron = cron;
    this.#noRetry = noRetry;
  }
  static {
    __name(this, "__Facade_ScheduledController__");
  }
  #noRetry;
  noRetry() {
    if (!(this instanceof ___Facade_ScheduledController__)) {
      throw new TypeError("Illegal invocation");
    }
    this.#noRetry();
  }
};
function wrapExportedHandler(worker) {
  if (__INTERNAL_WRANGLER_MIDDLEWARE__ === void 0 || __INTERNAL_WRANGLER_MIDDLEWARE__.length === 0) {
    return worker;
  }
  for (const middleware of __INTERNAL_WRANGLER_MIDDLEWARE__) {
    __facade_register__(middleware);
  }
  const fetchDispatcher = /* @__PURE__ */ __name(function(request, env, ctx) {
    if (worker.fetch === void 0) {
      throw new Error("Handler does not export a fetch() function.");
    }
    return worker.fetch(request, env, ctx);
  }, "fetchDispatcher");
  return {
    ...worker,
    fetch(request, env, ctx) {
      const dispatcher = /* @__PURE__ */ __name(function(type, init) {
        if (type === "scheduled" && worker.scheduled !== void 0) {
          const controller = new __Facade_ScheduledController__(
            Date.now(),
            init.cron ?? "",
            () => {
            }
          );
          return worker.scheduled(controller, env, ctx);
        }
      }, "dispatcher");
      return __facade_invoke__(request, env, ctx, dispatcher, fetchDispatcher);
    }
  };
}
__name(wrapExportedHandler, "wrapExportedHandler");
function wrapWorkerEntrypoint(klass) {
  if (__INTERNAL_WRANGLER_MIDDLEWARE__ === void 0 || __INTERNAL_WRANGLER_MIDDLEWARE__.length === 0) {
    return klass;
  }
  for (const middleware of __INTERNAL_WRANGLER_MIDDLEWARE__) {
    __facade_register__(middleware);
  }
  return class extends klass {
    #fetchDispatcher = /* @__PURE__ */ __name((request, env, ctx) => {
      this.env = env;
      this.ctx = ctx;
      if (super.fetch === void 0) {
        throw new Error("Entrypoint class does not define a fetch() function.");
      }
      return super.fetch(request);
    }, "#fetchDispatcher");
    #dispatcher = /* @__PURE__ */ __name((type, init) => {
      if (type === "scheduled" && super.scheduled !== void 0) {
        const controller = new __Facade_ScheduledController__(
          Date.now(),
          init.cron ?? "",
          () => {
          }
        );
        return super.scheduled(controller);
      }
    }, "#dispatcher");
    fetch(request) {
      return __facade_invoke__(
        request,
        this.env,
        this.ctx,
        this.#dispatcher,
        this.#fetchDispatcher
      );
    }
  };
}
__name(wrapWorkerEntrypoint, "wrapWorkerEntrypoint");
var WRAPPED_ENTRY;
if (typeof middleware_insertion_facade_default === "object") {
  WRAPPED_ENTRY = wrapExportedHandler(middleware_insertion_facade_default);
} else if (typeof middleware_insertion_facade_default === "function") {
  WRAPPED_ENTRY = wrapWorkerEntrypoint(middleware_insertion_facade_default);
}
var middleware_loader_entry_default = WRAPPED_ENTRY;
export {
  PersonalDataServer,
  __INTERNAL_WRANGLER_MIDDLEWARE__,
  middleware_loader_entry_default as default
};
//# sourceMappingURL=pds.js.map

.wrangler/tmp/dev-qpR91k/pds.js.map (+8)

{
	"version": 3,
	"sources": ["../bundle-MaCAbF/checked-fetch.js", "../../../src/pds.js", "../../../../../.npm/_npx/32026684e21afda6/node_modules/wrangler/templates/middleware/middleware-ensure-req-body-drained.ts", "../../../../../.npm/_npx/32026684e21afda6/node_modules/wrangler/templates/middleware/middleware-miniflare3-json-error.ts", "../bundle-MaCAbF/middleware-insertion-facade.js", "../../../../../.npm/_npx/32026684e21afda6/node_modules/wrangler/templates/middleware/common.ts", "../bundle-MaCAbF/middleware-loader.entry.ts"],
	"sourceRoot": "/Users/chadmiller/code/pds-experiment/.wrangler/tmp/dev-qpR91k",
	"sourcesContent": [···],
	"mappings": "···",
	"names": []
}

docs/plans/2026-01-04-cloudflare-pds.md (+1392)

# Cloudflare Durable Objects PDS Implementation Plan

> **For Claude:** REQUIRED SUB-SKILL: Use superpowers:executing-plans to implement this plan task-by-task.

**Goal:** Build a minimal AT Protocol PDS on Cloudflare Durable Objects with zero dependencies.

**Architecture:** Each user gets their own Durable Object with SQLite storage. A router Worker maps DIDs to Objects. All crypto uses Web Crypto API (P-256 for signing, SHA-256 for hashing).

**Tech Stack:** Cloudflare Workers, Durable Objects, SQLite, Web Crypto API, no npm dependencies.

---

## Task 1: Project Setup

**Files:**
- Create: `package.json`
- Create: `wrangler.toml`
- Create: `src/pds.js`

**Step 1: Initialize package.json**

```json
{
  "name": "cloudflare-pds",
  "version": "0.1.0",
  "private": true,
  "scripts": {
    "dev": "wrangler dev",
    "deploy": "wrangler deploy",
    "test": "node test/run.js"
  }
}
```

**Step 2: Create wrangler.toml**

```toml
name = "atproto-pds"
main = "src/pds.js"
compatibility_date = "2024-01-01"

[[durable_objects.bindings]]
name = "PDS"
class_name = "PersonalDataServer"

[[migrations]]
tag = "v1"
new_sqlite_classes = ["PersonalDataServer"]
```

**Step 3: Create minimal src/pds.js skeleton**

```javascript
export class PersonalDataServer {
  constructor(state, env) {
    this.state = state
    this.sql = state.storage.sql
  }

  async fetch(request) {
    return new Response('pds running', { status: 200 })
  }
}

export default {
  async fetch(request, env) {
    const url = new URL(request.url)
    const did = url.searchParams.get('did')

    if (!did) {
      return new Response('missing did param', { status: 400 })
    }

    const id = env.PDS.idFromName(did)
    const pds = env.PDS.get(id)
    return pds.fetch(request)
  }
}
```

**Step 4: Verify it runs**

Run: `npx wrangler dev`
Test: `curl "http://localhost:8787/?did=did:plc:test"`
Expected: `pds running`

**Step 5: Commit**

```bash
git init
git add -A
git commit -m "feat: initial project setup with Durable Object skeleton"
```

---

## Task 2: CBOR Encoding

**Files:**
- Modify: `src/pds.js`

Implement minimal deterministic CBOR encoding. Only the types AT Protocol uses: maps, arrays, strings, bytes, integers, null, booleans.

**Step 1: Add CBOR encoding function**

Add to top of `src/pds.js`:

```javascript
// === CBOR ENCODING ===
// Minimal deterministic CBOR (RFC 8949) - sorted keys, minimal integers

function cborEncode(value) {
  const parts = []

  function encode(val) {
    if (val === null) {
      parts.push(0xf6) // null
    } else if (val === true) {
      parts.push(0xf5) // true
    } else if (val === false) {
      parts.push(0xf4) // false
    } else if (typeof val === 'number') {
      encodeInteger(val)
    } else if (typeof val === 'string') {
      const bytes = new TextEncoder().encode(val)
      encodeHead(3, bytes.length) // major type 3 = text string
      parts.push(...bytes)
    } else if (val instanceof Uint8Array) {
      encodeHead(2, val.length) // major type 2 = byte string
      parts.push(...val)
    } else if (Array.isArray(val)) {
      encodeHead(4, val.length) // major type 4 = array
      for (const item of val) encode(item)
    } else if (typeof val === 'object') {
      // Sort keys for deterministic encoding
      const keys = Object.keys(val).sort()
      encodeHead(5, keys.length) // major type 5 = map
      for (const key of keys) {
        encode(key)
        encode(val[key])
      }
    }
  }

  function encodeHead(majorType, length) {
    const mt = majorType << 5
    if (length < 24) {
      parts.push(mt | length)
    } else if (length < 256) {
      parts.push(mt | 24, length)
    } else if (length < 65536) {
      parts.push(mt | 25, length >> 8, length & 0xff)
    } else if (length < 4294967296) {
      parts.push(mt | 26, (length >> 24) & 0xff, (length >> 16) & 0xff, (length >> 8) & 0xff, length & 0xff)
    }
  }

  function encodeInteger(n) {
    if (n >= 0) {
      encodeHead(0, n) // major type 0 = unsigned int
    } else {
      encodeHead(1, -n - 1) // major type 1 = negative int
    }
  }

  encode(value)
  return new Uint8Array(parts)
}
```

**Step 2: Add simple test endpoint**

Modify the fetch handler temporarily:

```javascript
async fetch(request) {
  const url = new URL(request.url)
  if (url.pathname === '/test/cbor') {
    const encoded = cborEncode({ hello: 'world', num: 42 })
    return new Response(encoded, {
      headers: { 'content-type': 'application/cbor' }
    })
  }
  return new Response('pds running', { status: 200 })
}
```

**Step 3: Verify CBOR output**

Run: `npx wrangler dev`
Test: `curl "http://localhost:8787/test/cbor?did=did:plc:test" | xxd`
Expected: Valid CBOR bytes (a2 65 68 65 6c 6c 6f 65 77 6f 72 6c 64 63 6e 75 6d 18 2a)
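
For reference, those expected bytes break down as (keys in plain sorted order, so `hello` precedes `num`):

```
a2                   map, 2 entries
65 68 65 6c 6c 6f    text(5) "hello"
65 77 6f 72 6c 64    text(5) "world"
63 6e 75 6d          text(3) "num"
18 2a                unsigned int 42 (one-byte argument)
```

Note: strict DAG-CBOR sorts map keys length-first, so a spec-canonical encoder would emit `num` before `hello`. Plain lexicographic sorting is self-consistent within this PDS but won't byte-match other atproto implementations.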

**Step 4: Commit**

```bash
git add src/pds.js
git commit -m "feat: add deterministic CBOR encoding"
```

---

## Task 3: CID Generation

**Files:**
- Modify: `src/pds.js`

Generate CIDs (Content Identifiers) using SHA-256 + multiformat encoding.

**Step 1: Add CID utilities**

Add after CBOR section:

```javascript
// === CID GENERATION ===
// dag-cbor (0x71) + sha-256 (0x12) + 32 bytes

async function createCid(bytes) {
  const hash = await crypto.subtle.digest('SHA-256', bytes)
  const hashBytes = new Uint8Array(hash)

  // CIDv1: version(1) + codec(dag-cbor=0x71) + multihash(sha256)
  // Multihash: hash-type(0x12) + length(0x20=32) + digest
  const cid = new Uint8Array(2 + 2 + 32)
  cid[0] = 0x01 // CIDv1
  cid[1] = 0x71 // dag-cbor codec
  cid[2] = 0x12 // sha-256
  cid[3] = 0x20 // 32 bytes
  cid.set(hashBytes, 4)

  return cid
}

function cidToString(cid) {
  // base32lower encoding for CIDv1
  return 'b' + base32Encode(cid)
}

function base32Encode(bytes) {
  const alphabet = 'abcdefghijklmnopqrstuvwxyz234567'
  let result = ''
  let bits = 0
  let value = 0

  for (const byte of bytes) {
    value = (value << 8) | byte
    bits += 8
    while (bits >= 5) {
      bits -= 5
      result += alphabet[(value >> bits) & 31]
    }
  }

  if (bits > 0) {
    result += alphabet[(value << (5 - bits)) & 31]
  }

  return result
}
```

**Step 2: Add test endpoint**

```javascript
if (url.pathname === '/test/cid') {
  const data = cborEncode({ test: 'data' })
  const cid = await createCid(data)
  return Response.json({ cid: cidToString(cid) })
}
```

**Step 3: Verify CID generation**

Run: `npx wrangler dev`
Test: `curl "http://localhost:8787/test/cid?did=did:plc:test"`
Expected: JSON with a CID string starting with `bafyrei` (the base32 expansion of the fixed 01 71 12 20 prefix: CIDv1, dag-cbor, sha-256)

**Step 4: Commit**

```bash
git add src/pds.js
git commit -m "feat: add CID generation with SHA-256"
```

---

## Task 4: TID Generation

**Files:**
- Modify: `src/pds.js`

Generate TIDs (Timestamp IDs) for record keys and revisions.

**Step 1: Add TID utilities**

Add after CID section:

```javascript
// === TID GENERATION ===
// Timestamp-based IDs: base32-sort encoded microseconds + clock ID

const TID_CHARS = '234567abcdefghijklmnopqrstuvwxyz'
let lastTimestamp = 0
let clockId = Math.floor(Math.random() * 1024)

function createTid() {
  let timestamp = Date.now() * 1000 // microseconds

  // Ensure monotonic
  if (timestamp <= lastTimestamp) {
    timestamp = lastTimestamp + 1
  }
  lastTimestamp = timestamp

  // 13 chars: 11 for timestamp (64 bits but only ~53 used), 2 for clock ID
  let tid = ''

  // Encode timestamp (high bits first for sortability)
  let ts = timestamp
  for (let i = 0; i < 11; i++) {
    tid = TID_CHARS[ts % 32] + tid // % not &: microsecond timestamps exceed 32 bits, so bitwise ops would truncate
    ts = Math.floor(ts / 32)
  }

  // Append clock ID (2 chars)
  tid += TID_CHARS[(clockId >> 5) & 31]
  tid += TID_CHARS[clockId & 31]

  return tid
}
```
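
For eyeballing the next step's output, a small inverse helper (a sketch, not required by the plan) recovers the microsecond timestamp from a TID's first 11 chars:

```javascript
// Sketch: decode a TID's timestamp portion (inverse of the encode loop above).
function tidToTimestamp(tid) {
  let ts = 0
  for (let i = 0; i < 11; i++) {
    ts = ts * 32 + TID_CHARS.indexOf(tid[i])
  }
  return ts // microseconds since the Unix epoch
}
```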

**Step 2: Add test endpoint**

```javascript
if (url.pathname === '/test/tid') {
  const tids = [createTid(), createTid(), createTid()]
  return Response.json({ tids })
}
```

**Step 3: Verify TIDs are monotonic**

Run: `npx wrangler dev`
Test: `curl "http://localhost:8787/test/tid?did=did:plc:test"`
Expected: Three 13-char TIDs, each greater than the previous

**Step 4: Commit**

```bash
git add src/pds.js
git commit -m "feat: add TID generation for record keys"
```

---

## Task 5: SQLite Schema

**Files:**
- Modify: `src/pds.js`

Initialize the database schema when the Durable Object starts.

**Step 1: Add schema initialization**

Modify the constructor:

```javascript
export class PersonalDataServer {
  constructor(state, env) {
    this.state = state
    this.sql = state.storage.sql
    this.env = env

    // Initialize schema
    this.sql.exec(`
      CREATE TABLE IF NOT EXISTS blocks (
        cid TEXT PRIMARY KEY,
        data BLOB NOT NULL
      );

      CREATE TABLE IF NOT EXISTS records (
        uri TEXT PRIMARY KEY,
        cid TEXT NOT NULL,
        collection TEXT NOT NULL,
        rkey TEXT NOT NULL,
        value BLOB NOT NULL
      );

      CREATE TABLE IF NOT EXISTS commits (
        seq INTEGER PRIMARY KEY AUTOINCREMENT,
        cid TEXT NOT NULL,
        rev TEXT NOT NULL,
        prev TEXT
      );

      CREATE TABLE IF NOT EXISTS seq_events (
        seq INTEGER PRIMARY KEY AUTOINCREMENT,
        did TEXT NOT NULL,
        commit_cid TEXT NOT NULL,
        evt BLOB NOT NULL
      );

      CREATE INDEX IF NOT EXISTS idx_records_collection ON records(collection, rkey);
    `)
  }
  // ... rest of class
}
```

**Step 2: Add test endpoint to verify schema**

```javascript
if (url.pathname === '/test/schema') {
  const tables = this.sql.exec(`
    SELECT name FROM sqlite_master WHERE type='table' ORDER BY name
  `).toArray()
  return Response.json({ tables: tables.map(t => t.name) })
}
```

**Step 3: Verify schema creates**

Run: `npx wrangler dev`
Test: `curl "http://localhost:8787/test/schema?did=did:plc:test"`
Expected: `{"tables":["blocks","commits","records","seq_events"]}`

**Step 4: Commit**

```bash
git add src/pds.js
git commit -m "feat: add SQLite schema for PDS storage"
```

---

## Task 6: P-256 Signing

**Files:**
- Modify: `src/pds.js`

Add P-256 ECDSA signing using Web Crypto API.

**Step 1: Add signing utilities**

Add after TID section:

```javascript
// === P-256 SIGNING ===
// Web Crypto ECDSA with P-256 curve

async function importPrivateKey(privateKeyBytes) {
  // PKCS#8 wrapper for raw P-256 private key
  const pkcs8Prefix = new Uint8Array([
    0x30, 0x41, 0x02, 0x01, 0x00, 0x30, 0x13, 0x06, 0x07, 0x2a, 0x86, 0x48,
    0xce, 0x3d, 0x02, 0x01, 0x06, 0x08, 0x2a, 0x86, 0x48, 0xce, 0x3d, 0x03,
    0x01, 0x07, 0x04, 0x27, 0x30, 0x25, 0x02, 0x01, 0x01, 0x04, 0x20
  ])

  const pkcs8 = new Uint8Array(pkcs8Prefix.length + 32)
  pkcs8.set(pkcs8Prefix)
  pkcs8.set(privateKeyBytes, pkcs8Prefix.length)

  return crypto.subtle.importKey(
    'pkcs8',
    pkcs8,
    { name: 'ECDSA', namedCurve: 'P-256' },
    false,
    ['sign']
  )
}

async function sign(privateKey, data) {
  const signature = await crypto.subtle.sign(
    { name: 'ECDSA', hash: 'SHA-256' },
    privateKey,
    data
  )
  return new Uint8Array(signature)
}

async function generateKeyPair() {
  const keyPair = await crypto.subtle.generateKey(
    { name: 'ECDSA', namedCurve: 'P-256' },
    true,
    ['sign', 'verify']
  )

  // Export private key as raw bytes
  const privateJwk = await crypto.subtle.exportKey('jwk', keyPair.privateKey)
  const privateBytes = base64UrlDecode(privateJwk.d)

  // Export public key as compressed point
  const publicRaw = await crypto.subtle.exportKey('raw', keyPair.publicKey)
  const publicBytes = new Uint8Array(publicRaw)
  const compressed = compressPublicKey(publicBytes)

  return { privateKey: privateBytes, publicKey: compressed }
}

function compressPublicKey(uncompressed) {
  // uncompressed is 65 bytes: 0x04 + x(32) + y(32)
  // compressed is 33 bytes: prefix(02 or 03) + x(32)
  const x = uncompressed.slice(1, 33)
  const y = uncompressed.slice(33, 65)
  const prefix = (y[31] & 1) === 0 ? 0x02 : 0x03
  const compressed = new Uint8Array(33)
  compressed[0] = prefix
  compressed.set(x, 1)
  return compressed
}

function base64UrlDecode(str) {
  const base64 = str.replace(/-/g, '+').replace(/_/g, '/')
  const binary = atob(base64)
  const bytes = new Uint8Array(binary.length)
  for (let i = 0; i < binary.length; i++) {
    bytes[i] = binary.charCodeAt(i)
  }
  return bytes
}

function bytesToHex(bytes) {
  return Array.from(bytes).map(b => b.toString(16).padStart(2, '0')).join('')
}

function hexToBytes(hex) {
  const bytes = new Uint8Array(hex.length / 2)
  for (let i = 0; i < hex.length; i += 2) {
    bytes[i / 2] = parseInt(hex.substr(i, 2), 16)
  }
  return bytes
}
```
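
To sanity-check signatures end to end, a minimal verify counterpart (a sketch; it imports the uncompressed 65-byte public point, since raw import of compressed points isn't reliably supported):

```javascript
// Sketch: verify a signature produced by sign() above. Expects the
// uncompressed public key (the publicRaw bytes in generateKeyPair,
// before compression).
async function verifySignature(uncompressedPublicKey, data, signature) {
  const key = await crypto.subtle.importKey(
    'raw',
    uncompressedPublicKey,
    { name: 'ECDSA', namedCurve: 'P-256' },
    false,
    ['verify']
  )
  return crypto.subtle.verify({ name: 'ECDSA', hash: 'SHA-256' }, key, signature, data)
}
```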

**Step 2: Add test endpoint**

```javascript
if (url.pathname === '/test/sign') {
  const kp = await generateKeyPair()
  const data = new TextEncoder().encode('test message')
  const key = await importPrivateKey(kp.privateKey)
  const sig = await sign(key, data)
  return Response.json({
    publicKey: bytesToHex(kp.publicKey),
    signature: bytesToHex(sig)
  })
}
```

**Step 3: Verify signing works**

Run: `npx wrangler dev`
Test: `curl "http://localhost:8787/test/sign?did=did:plc:test"`
Expected: JSON with 66-char public key hex and 128-char signature hex

**Step 4: Commit**

```bash
git add src/pds.js
git commit -m "feat: add P-256 ECDSA signing via Web Crypto"
```

---

## Task 7: Identity Storage

**Files:**
- Modify: `src/pds.js`

Store DID and signing key in Durable Object storage.

**Step 1: Add identity methods to class**

Add to PersonalDataServer class:

```javascript
async initIdentity(did, privateKeyHex) {
  await this.state.storage.put('did', did)
  await this.state.storage.put('privateKey', privateKeyHex)
}

async getDid() {
  if (!this._did) {
    this._did = await this.state.storage.get('did')
  }
  return this._did
}

async getSigningKey() {
  const hex = await this.state.storage.get('privateKey')
  if (!hex) return null
  return importPrivateKey(hexToBytes(hex))
}
```

**Step 2: Add init endpoint**

```javascript
if (url.pathname === '/init') {
  const body = await request.json()
  if (!body.did || !body.privateKey) {
    return Response.json({ error: 'missing did or privateKey' }, { status: 400 })
  }
  await this.initIdentity(body.did, body.privateKey)
  return Response.json({ ok: true, did: body.did })
}
```

**Step 3: Add status endpoint**

```javascript
if (url.pathname === '/status') {
  const did = await this.getDid()
  return Response.json({
    initialized: !!did,
    did: did || null
  })
}
```

**Step 4: Verify identity storage**

Run: `npx wrangler dev`

```bash
# Check uninitialized
curl "http://localhost:8787/status?did=did:plc:test"
# Expected: {"initialized":false,"did":null}

# Initialize
curl -X POST "http://localhost:8787/init?did=did:plc:test" \
  -H "Content-Type: application/json" \
  -d '{"did":"did:plc:test","privateKey":"0123456789abcdef0123456789abcdef0123456789abcdef0123456789abcdef"}'
# Expected: {"ok":true,"did":"did:plc:test"}

# Check initialized
curl "http://localhost:8787/status?did=did:plc:test"
# Expected: {"initialized":true,"did":"did:plc:test"}
```
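
The hex key above is a placeholder scalar, fine for local testing. To mint a real key instead, a temporary route (a sketch built on the Task 6 helpers; remove it before deploying) can hand one out:

```javascript
// Sketch: generate a fresh P-256 key and return its hex-encoded private
// scalar, suitable for the privateKey field of /init.
if (url.pathname === '/test/keygen') {
  const kp = await generateKeyPair()
  return Response.json({ privateKey: bytesToHex(kp.privateKey) })
}
```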

**Step 5: Commit**

```bash
git add src/pds.js
git commit -m "feat: add identity storage and init endpoint"
```

---

## Task 8: MST (Merkle Search Tree)

**Files:**
- Modify: `src/pds.js`

Implement a simple MST that rebuilds on each write.

**Step 1: Add MST utilities**

Add after signing section:

```javascript
// === MERKLE SEARCH TREE ===
// Simple rebuild-on-write implementation

async function sha256(data) {
  const hash = await crypto.subtle.digest('SHA-256', data)
  return new Uint8Array(hash)
}

function getKeyDepth(key) {
  // Determine tree depth from leading zero bits of the key.
  // (The raw key bytes stand in for a hash here so this stays synchronous.)
  const keyBytes = new TextEncoder().encode(key)
  let zeros = 0
  for (const byte of keyBytes) {
    if (byte === 0) zeros += 8
    else {
      for (let i = 7; i >= 0; i--) {
        if ((byte >> i) & 1) break
        zeros++
      }
      break
    }
  }
  return Math.floor(zeros / 4)
}

class MST {
  constructor(sql) {
    this.sql = sql
  }

  async computeRoot() {
    const records = this.sql.exec(`
      SELECT collection, rkey, cid FROM records ORDER BY collection, rkey
    `).toArray()

    if (records.length === 0) {
      return null
    }

    const entries = records.map(r => ({
      key: `${r.collection}/${r.rkey}`,
      cid: r.cid
    }))

    return this.buildTree(entries, 0)
  }

  async buildTree(entries, depth) {
    if (entries.length === 0) return null

    const node = { l: null, e: [] }
    let leftEntries = []

    for (const entry of entries) {
      const keyDepth = getKeyDepth(entry.key)

      if (keyDepth > depth) {
        leftEntries.push(entry)
      } else {
        // Store accumulated left entries
        if (leftEntries.length > 0) {
          const leftCid = await this.buildTree(leftEntries, depth + 1)
          if (node.e.length === 0) {
            node.l = leftCid
          } else {
            node.e[node.e.length - 1].t = leftCid
          }
          leftEntries = []
        }
        node.e.push({ k: entry.key, v: entry.cid, t: null })
      }
    }

    // Handle remaining left entries
    if (leftEntries.length > 0) {
      const leftCid = await this.buildTree(leftEntries, depth + 1)
      if (node.e.length > 0) {
        node.e[node.e.length - 1].t = leftCid
      } else {
        node.l = leftCid
      }
    }

    // Encode and store node
    const nodeBytes = cborEncode(node)
    const nodeCid = await createCid(nodeBytes)
    const cidStr = cidToString(nodeCid)

    this.sql.exec(
      `INSERT OR REPLACE INTO blocks (cid, data) VALUES (?, ?)`,
      cidStr,
      nodeBytes
    )

    return cidStr
  }
}
```
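
For reference, the real atproto MST derives depth from the SHA-256 of the key rather than the raw key bytes (as I understand the spec: leading zero bits of `sha256(key)`, counted in 2-bit increments). A sketch of that variant using the `sha256` helper above, if closer spec fidelity is wanted later:

```javascript
// Sketch (assumption about the atproto spec): hash-based key depth.
// Async, so buildTree would need to precompute depths before recursing.
async function getKeyDepthSpec(key) {
  const hash = await sha256(new TextEncoder().encode(key))
  let zeros = 0
  for (const byte of hash) {
    if (byte === 0) { zeros += 8; continue }
    for (let i = 7; i >= 0; i--) {
      if ((byte >> i) & 1) break
      zeros++
    }
    break
  }
  return Math.floor(zeros / 2)
}
```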

**Step 2: Add MST test endpoint**

```javascript
if (url.pathname === '/test/mst') {
  // Insert some test records
  this.sql.exec(`INSERT OR REPLACE INTO records VALUES (?, ?, ?, ?, ?)`,
    'at://did:plc:test/app.bsky.feed.post/abc', 'cid1', 'app.bsky.feed.post', 'abc', new Uint8Array([1]))
  this.sql.exec(`INSERT OR REPLACE INTO records VALUES (?, ?, ?, ?, ?)`,
    'at://did:plc:test/app.bsky.feed.post/def', 'cid2', 'app.bsky.feed.post', 'def', new Uint8Array([2]))

  const mst = new MST(this.sql)
  const root = await mst.computeRoot()
  return Response.json({ root })
}
```

**Step 3: Verify MST builds**

Run: `npx wrangler dev`
Test: `curl "http://localhost:8787/test/mst?did=did:plc:test"`
Expected: JSON with root CID string

**Step 4: Commit**

```bash
git add src/pds.js
git commit -m "feat: add Merkle Search Tree implementation"
```

---

## Task 9: createRecord Endpoint

**Files:**
- Modify: `src/pds.js`

Implement the core write path.

**Step 1: Add createRecord method**

Add to PersonalDataServer class:

```javascript
async createRecord(collection, record, rkey = null) {
  const did = await this.getDid()
  if (!did) throw new Error('PDS not initialized')

  rkey = rkey || createTid()
  const uri = `at://${did}/${collection}/${rkey}`

  // Encode and hash record
  const recordBytes = cborEncode(record)
  const recordCid = await createCid(recordBytes)
  const recordCidStr = cidToString(recordCid)

  // Store block
  this.sql.exec(
    `INSERT OR REPLACE INTO blocks (cid, data) VALUES (?, ?)`,
    recordCidStr, recordBytes
  )

  // Store record index
  this.sql.exec(
    `INSERT OR REPLACE INTO records (uri, cid, collection, rkey, value) VALUES (?, ?, ?, ?, ?)`,
    uri, recordCidStr, collection, rkey, recordBytes
  )

  // Rebuild MST
  const mst = new MST(this.sql)
  const dataRoot = await mst.computeRoot()

  // Get previous commit, if any (toArray()[0]: .one() throws on an empty result)
  const prevCommit = this.sql.exec(
    `SELECT cid, rev FROM commits ORDER BY seq DESC LIMIT 1`
  ).toArray()[0]

  // Create commit
  const rev = createTid()
  const commit = {
    did,
    version: 3,
    data: dataRoot,
    rev,
    prev: prevCommit?.cid || null
  }

  // Sign commit
  const commitBytes = cborEncode(commit)
  const signingKey = await this.getSigningKey()
  const sig = await sign(signingKey, commitBytes)

  const signedCommit = { ...commit, sig }
  const signedBytes = cborEncode(signedCommit)
  const commitCid = await createCid(signedBytes)
  const commitCidStr = cidToString(commitCid)

  // Store commit block
  this.sql.exec(
    `INSERT OR REPLACE INTO blocks (cid, data) VALUES (?, ?)`,
    commitCidStr, signedBytes
  )

  // Store commit reference
  this.sql.exec(
    `INSERT INTO commits (cid, rev, prev) VALUES (?, ?, ?)`,
    commitCidStr, rev, prevCommit?.cid || null
  )

  // Sequence event
  const evt = cborEncode({
    ops: [{ action: 'create', path: `${collection}/${rkey}`, cid: recordCidStr }]
  })
  this.sql.exec(
    `INSERT INTO seq_events (did, commit_cid, evt) VALUES (?, ?, ?)`,
    did, commitCidStr, evt
  )

  return { uri, cid: recordCidStr, commit: commitCidStr }
}
```

**Step 2: Add XRPC endpoint**

```javascript
if (url.pathname === '/xrpc/com.atproto.repo.createRecord') {
  if (request.method !== 'POST') {
    return Response.json({ error: 'method not allowed' }, { status: 405 })
  }

  const body = await request.json()
  if (!body.collection || !body.record) {
    return Response.json({ error: 'missing collection or record' }, { status: 400 })
  }

  try {
    const result = await this.createRecord(body.collection, body.record, body.rkey)
    return Response.json(result)
  } catch (err) {
    return Response.json({ error: err.message }, { status: 500 })
  }
}
```

**Step 3: Verify createRecord works**

Run: `npx wrangler dev`

```bash
# First initialize
curl -X POST "http://localhost:8787/init?did=did:plc:test123" \
  -H "Content-Type: application/json" \
  -d '{"did":"did:plc:test123","privateKey":"0123456789abcdef0123456789abcdef0123456789abcdef0123456789abcdef"}'

# Create a post
curl -X POST "http://localhost:8787/xrpc/com.atproto.repo.createRecord?did=did:plc:test123" \
  -H "Content-Type: application/json" \
  -d '{"collection":"app.bsky.feed.post","record":{"text":"Hello world!","createdAt":"2026-01-04T00:00:00Z"}}'
```

Expected: JSON with uri, cid, and commit fields
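
Roughly (the rkey and CIDs differ per run; all CIDs from this PDS start with `bafyrei`):

```json
{
  "uri": "at://did:plc:test123/app.bsky.feed.post/3...",
  "cid": "bafyrei...",
  "commit": "bafyrei..."
}
```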
923
+
924
+
**Step 4: Commit**
925
+
926
+
```bash
927
+
git add src/pds.js
928
+
git commit -m "feat: add createRecord endpoint"
929
+
```
930
+
931
+
---
932
+
933
+
## Task 10: getRecord Endpoint

**Files:**
- Modify: `src/pds.js`

**Step 1: Add XRPC endpoint**

```javascript
if (url.pathname === '/xrpc/com.atproto.repo.getRecord') {
  const collection = url.searchParams.get('collection')
  const rkey = url.searchParams.get('rkey')

  if (!collection || !rkey) {
    return Response.json({ error: 'missing collection or rkey' }, { status: 400 })
  }

  const did = await this.getDid()
  const uri = `at://${did}/${collection}/${rkey}`

  // toArray()[0] rather than .one(): .one() throws when the query returns zero rows
  const row = this.sql.exec(
    `SELECT cid, value FROM records WHERE uri = ?`, uri
  ).toArray()[0]

  if (!row) {
    return Response.json({ error: 'record not found' }, { status: 404 })
  }

  // Decode CBOR for response (minimal decoder); BLOB columns come back as ArrayBuffer
  const value = cborDecode(new Uint8Array(row.value))

  return Response.json({ uri, cid: row.cid, value })
}
```

**Step 2: Add minimal CBOR decoder**

Add after cborEncode:

```javascript
// Minimal CBOR decoder: handles the subset cborEncode produces.
// Tags (major type 6, e.g. CID links), floats, and 64-bit lengths are not handled.
function cborDecode(bytes) {
  let offset = 0

  function read() {
    const initial = bytes[offset++]
    const major = initial >> 5
    const info = initial & 0x1f

    let length = info
    if (info === 24) length = bytes[offset++]
    else if (info === 25) { length = (bytes[offset++] << 8) | bytes[offset++] }
    else if (info === 26) {
      length = (bytes[offset++] << 24) | (bytes[offset++] << 16) | (bytes[offset++] << 8) | bytes[offset++]
    }

    switch (major) {
      case 0: return length // unsigned int
      case 1: return -1 - length // negative int
      case 2: { // byte string
        const data = bytes.slice(offset, offset + length)
        offset += length
        return data
      }
      case 3: { // text string
        const data = new TextDecoder().decode(bytes.slice(offset, offset + length))
        offset += length
        return data
      }
      case 4: { // array
        const arr = []
        for (let i = 0; i < length; i++) arr.push(read())
        return arr
      }
      case 5: { // map
        const obj = {}
        for (let i = 0; i < length; i++) {
          const key = read()
          obj[key] = read()
        }
        return obj
      }
      case 7: { // special
        if (info === 20) return false
        if (info === 21) return true
        if (info === 22) return null
        return undefined
      }
    }
  }

  return read()
}
```
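
Since this decoder is the mirror of `cborEncode`, a quick round trip is a cheap sanity check. A sketch you can drop into a test or the console (assumes `cborEncode` from the earlier task is in scope):

```javascript
// Round-trip check: encode then decode a record-shaped object
const sample = { text: 'hi', langs: ['en'], count: 3 }
const decoded = cborDecode(cborEncode(sample))
// Compare field by field (decoded key order follows the canonical CBOR encoding)
console.log(decoded.text === 'hi', decoded.count === 3, decoded.langs[0] === 'en')
```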

**Step 3: Verify getRecord works**

Run: `npx wrangler dev`

```bash
# Create a record first, then get it
curl "http://localhost:8787/xrpc/com.atproto.repo.getRecord?did=did:plc:test123&collection=app.bsky.feed.post&rkey=<rkey_from_create>"
```

Expected: JSON with `uri`, `cid`, and `value` (the original record)

**Step 4: Commit**

```bash
git add src/pds.js
git commit -m "feat: add getRecord endpoint with CBOR decoder"
```

---

## Task 11: CAR File Builder

**Files:**
- Modify: `src/pds.js`

Build CAR (Content Addressable aRchive) files for repo export.

**Step 1: Add CAR builder**

Add after MST section:

```javascript
// === CAR FILE BUILDER ===

function varint(n) {
  const bytes = []
  while (n >= 0x80) {
    bytes.push((n & 0x7f) | 0x80)
    n >>>= 7
  }
  bytes.push(n)
  return new Uint8Array(bytes)
}

function cidToBytes(cidStr) {
  // Decode base32lower CID string to bytes
  if (!cidStr.startsWith('b')) throw new Error('expected base32lower CID')
  return base32Decode(cidStr.slice(1))
}

function base32Decode(str) {
  const alphabet = 'abcdefghijklmnopqrstuvwxyz234567'
  let bits = 0
  let value = 0
  const output = []

  for (const char of str) {
    const idx = alphabet.indexOf(char)
    if (idx === -1) continue
    value = (value << 5) | idx
    bits += 5
    if (bits >= 8) {
      bits -= 8
      output.push((value >> bits) & 0xff)
    }
  }

  return new Uint8Array(output)
}

function buildCarFile(rootCid, blocks) {
  const parts = []

  // Header: { version: 1, roots: [rootCid] }
  // NOTE: spec-compliant CARs encode roots as CBOR tag 42 CID links;
  // this minimal encoder stores raw CID bytes, which strict parsers may reject.
  const rootCidBytes = cidToBytes(rootCid)
  const header = cborEncode({ version: 1, roots: [rootCidBytes] })
  parts.push(varint(header.length))
  parts.push(header)

  // Blocks: varint(len) + cid + data
  for (const block of blocks) {
    const cidBytes = cidToBytes(block.cid)
    // BLOB columns come back from SQLite as ArrayBuffer; normalize to Uint8Array
    const data = block.data instanceof Uint8Array ? block.data : new Uint8Array(block.data)
    const blockLen = cidBytes.length + data.length
    parts.push(varint(blockLen))
    parts.push(cidBytes)
    parts.push(data)
  }

  // Concatenate all parts
  const totalLen = parts.reduce((sum, p) => sum + p.length, 0)
  const car = new Uint8Array(totalLen)
  let offset = 0
  for (const part of parts) {
    car.set(part, offset)
    offset += part.length
  }

  return car
}
```
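
These helpers are easy to spot-check by hand. A couple of worked values (expected outputs computed from the definitions above):

```javascript
// varint: 300 = 0b10_0101100; low 7 bits with continuation bit, then high bits
console.log(varint(300))           // Uint8Array [ 0xac, 0x02 ]
// base32: "mzxq" is the RFC 4648 lowercase-base32 encoding of "fo"
console.log(base32Decode('mzxq'))  // Uint8Array [ 0x66, 0x6f ]
```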

**Step 2: Commit**

```bash
git add src/pds.js
git commit -m "feat: add CAR file builder"
```

---

## Task 12: getRepo Endpoint

**Files:**
- Modify: `src/pds.js`

**Step 1: Add XRPC endpoint**

```javascript
if (url.pathname === '/xrpc/com.atproto.sync.getRepo') {
  // toArray()[0] rather than .one(): .one() throws when there are no commits yet
  const commit = this.sql.exec(
    `SELECT cid FROM commits ORDER BY seq DESC LIMIT 1`
  ).toArray()[0]

  if (!commit) {
    return Response.json({ error: 'repo not found' }, { status: 404 })
  }

  const blocks = this.sql.exec(`SELECT cid, data FROM blocks`).toArray()
  const car = buildCarFile(commit.cid, blocks)

  return new Response(car, {
    headers: { 'content-type': 'application/vnd.ipld.car' }
  })
}
```

**Step 2: Verify getRepo works**

Run: `npx wrangler dev`

```bash
curl "http://localhost:8787/xrpc/com.atproto.sync.getRepo?did=did:plc:test123" -o repo.car
xxd repo.car | head -20
```

Expected: a binary CAR file, starting with a varint header length followed by the CBOR header, then length-prefixed blocks
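
To check the header programmatically rather than eyeballing hex, you can parse it back with the Task 10 decoder. A sketch, run against a dev server on the assumed localhost port:

```javascript
// Fetch the CAR, decode its varint length prefix, then CBOR-decode the header
const res = await fetch('http://localhost:8787/xrpc/com.atproto.sync.getRepo?did=did:plc:test123')
const car = new Uint8Array(await res.arrayBuffer())

// Varint decode: least-significant 7-bit group first, continuation bit 0x80
let i = 0, len = 0, shift = 0
while (car[i] & 0x80) { len |= (car[i++] & 0x7f) << shift; shift += 7 }
len |= car[i++] << shift

console.log(cborDecode(car.slice(i, i + len))) // expect { version: 1, roots: [ Uint8Array ] }
```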

**Step 3: Commit**

```bash
git add src/pds.js
git commit -m "feat: add getRepo endpoint returning CAR file"
```

---

## Task 13: subscribeRepos WebSocket

**Files:**
- Modify: `src/pds.js`

**Step 1: Add WebSocket endpoint**

```javascript
if (url.pathname === '/xrpc/com.atproto.sync.subscribeRepos') {
  const upgradeHeader = request.headers.get('Upgrade')
  if (upgradeHeader !== 'websocket') {
    return new Response('expected websocket', { status: 426 })
  }

  const { 0: client, 1: server } = new WebSocketPair()
  this.state.acceptWebSocket(server)

  // Send backlog if cursor provided
  const cursor = url.searchParams.get('cursor')
  if (cursor) {
    const events = this.sql.exec(
      `SELECT * FROM seq_events WHERE seq > ? ORDER BY seq`,
      parseInt(cursor)
    ).toArray()

    for (const evt of events) {
      server.send(this.formatEvent(evt))
    }
  }

  return new Response(null, { status: 101, webSocket: client })
}
```
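
Reconnecting consumers use the `cursor` parameter to replay anything they missed. For example, assuming 42 was the last `seq` the consumer processed:

```bash
websocat "ws://localhost:8787/xrpc/com.atproto.sync.subscribeRepos?did=did:plc:test123&cursor=42"
```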

**Step 2: Add event formatting and WebSocket handlers**

Add to PersonalDataServer class:

```javascript
formatEvent(evt) {
  // evt is a full seq_events row (SELECT *), so it already carries the did
  const did = evt.did

  // AT Protocol frame format: header + body
  const header = cborEncode({ op: 1, t: '#commit' })
  const body = cborEncode({
    seq: evt.seq,
    rebase: false,
    tooBig: false,
    repo: did,
    commit: cidToBytes(evt.commit_cid),
    rev: createTid(), // Simplified - a full impl would reuse the commit's rev
    since: null,
    blocks: new Uint8Array(0), // Simplified - real impl includes CAR slice
    ops: cborDecode(new Uint8Array(evt.evt)).ops, // BLOB comes back as ArrayBuffer
    blobs: [],
    time: new Date().toISOString()
  })

  // Concatenate header + body
  const frame = new Uint8Array(header.length + body.length)
  frame.set(header)
  frame.set(body, header.length)
  return frame
}

async webSocketMessage(ws, message) {
  // Handle ping
  if (message === 'ping') ws.send('pong')
}

async webSocketClose(ws, code, reason) {
  // Durable Object will hibernate when no connections remain
}

broadcastEvent(evt) {
  const frame = this.formatEvent(evt)
  for (const ws of this.state.getWebSockets()) {
    try {
      ws.send(frame)
    } catch (e) {
      // Client disconnected
    }
  }
}
```

**Step 3: Update createRecord to broadcast**

Add at end of createRecord method, before return:

```javascript
// Broadcast to subscribers (the row just inserted is the newest)
const evtRow = this.sql.exec(
  `SELECT * FROM seq_events ORDER BY seq DESC LIMIT 1`
).toArray()[0]
if (evtRow) {
  this.broadcastEvent(evtRow)
}
```

**Step 4: Verify WebSocket works**

Run: `npx wrangler dev`

Use websocat or similar:

```bash
websocat "ws://localhost:8787/xrpc/com.atproto.sync.subscribeRepos?did=did:plc:test123"
```

In another terminal, create a record; you should see binary frames arrive on the WebSocket connection.
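
If you don't have websocat handy, a few lines against the standard WebSocket API do the same job. A sketch (works in a browser console, or Node 22+ where `WebSocket` is global):

```javascript
const ws = new WebSocket('ws://localhost:8787/xrpc/com.atproto.sync.subscribeRepos?did=did:plc:test123')
ws.binaryType = 'arraybuffer' // frames are binary CBOR, not text
ws.onopen = () => console.log('subscribed')
ws.onmessage = (e) => console.log('frame:', new Uint8Array(e.data).length, 'bytes')
```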

**Step 5: Commit**

```bash
git add src/pds.js
git commit -m "feat: add subscribeRepos WebSocket endpoint"
```

---

## Task 14: Clean Up Test Endpoints

**Files:**
- Modify: `src/pds.js`

**Step 1: Remove test endpoints**

Remove all `/test/*` endpoint handlers from the fetch method. Keep only:
- `/init`
- `/status`
- `/xrpc/com.atproto.repo.createRecord`
- `/xrpc/com.atproto.repo.getRecord`
- `/xrpc/com.atproto.sync.getRepo`
- `/xrpc/com.atproto.sync.subscribeRepos`

**Step 2: Add proper 404 handler**

As the final statement of the fetch method, after all route checks:

```javascript
return Response.json({ error: 'not found' }, { status: 404 })
```

**Step 3: Commit**

```bash
git add src/pds.js
git commit -m "chore: remove test endpoints, clean up routing"
```

---

## Task 15: Deploy and Test

**Step 1: Deploy to Cloudflare**

```bash
npx wrangler deploy
```

**Step 2: Initialize with a real DID**

Generate a P-256 keypair and create a did:plc (or use an existing one). A key-generation sketch follows the example below.

```bash
# Example initialization
curl -X POST "https://atproto-pds.<your-subdomain>.workers.dev/init?did=did:plc:yourActualDid" \
  -H "Content-Type: application/json" \
  -d '{"did":"did:plc:yourActualDid","privateKey":"your64CharHexPrivateKey"}'
```
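
If you need to mint a key, here is one way to get the 64-char hex private scalar that `/init` expects. This is a sketch using WebCrypto (runnable in Node 20+ or a browser console); it assumes your key-import code takes the raw P-256 scalar as hex, matching the local test in Task 9:

```javascript
// Generate a P-256 keypair and print the private scalar as 64 hex chars
const pair = await crypto.subtle.generateKey(
  { name: 'ECDSA', namedCurve: 'P-256' }, true, ['sign', 'verify']
)
const jwk = await crypto.subtle.exportKey('jwk', pair.privateKey)
// jwk.d is the private scalar, base64url-encoded; convert to hex
const raw = atob(jwk.d.replace(/-/g, '+').replace(/_/g, '/'))
const hex = [...raw].map(c => c.charCodeAt(0).toString(16).padStart(2, '0')).join('')
console.log(hex)
```

Keep this key somewhere safe; whoever holds it can sign commits as your DID.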

**Step 3: Create a test post**

```bash
curl -X POST "https://atproto-pds.<your-subdomain>.workers.dev/xrpc/com.atproto.repo.createRecord?did=did:plc:yourActualDid" \
  -H "Content-Type: application/json" \
  -d '{"collection":"app.bsky.feed.post","record":{"$type":"app.bsky.feed.post","text":"Hello from Cloudflare PDS!","createdAt":"2026-01-04T12:00:00.000Z"}}'
```

**Step 4: Verify repo is accessible**

```bash
curl "https://atproto-pds.<your-subdomain>.workers.dev/xrpc/com.atproto.sync.getRepo?did=did:plc:yourActualDid" -o test.car
```

**Step 5: Commit deployment config if needed**

```bash
git add -A
git commit -m "chore: ready for deployment"
```

---

## Summary

**Total lines:** ~400, in a single file
**Dependencies:** zero
**Endpoints:** 4 XRPC + 2 internal (`/init`, `/status`)

**What works:**
- Create records with proper CIDs
- MST for repo structure
- P-256 signed commits
- CAR file export for relays
- WebSocket streaming for real-time sync

**What's next (future tasks):**
- Incremental MST updates
- OAuth/JWT authentication
- Blob storage (R2)
- Handle resolution
- did:plc registration helper
+10
package.json

+25
src/pds.js
···
export class PersonalDataServer {
  constructor(state, env) {
    this.state = state
    this.sql = state.storage.sql
  }

  async fetch(request) {
    return new Response('pds running', { status: 200 })
  }
}

export default {
  async fetch(request, env) {
    const url = new URL(request.url)
    const did = url.searchParams.get('did')

    if (!did) {
      return new Response('missing did param', { status: 400 })
    }

    const id = env.PDS.idFromName(did)
    const pds = env.PDS.get(id)
    return pds.fetch(request)
  }
}