tangled
alpha
login
or
join now
tylur.dev
/
prototypey
1
fork
atom
prototypey.org - atproto lexicon typescript toolkit - mirror https://github.com/tylersayshi/prototypey
1
fork
atom
overview
issues
pulls
pipelines
inferred gen tests
Tyler
5 months ago
cc75989b
46f917c9
+480
4 changed files
expand all
collapse all
unified
split
packages
cli
tests
commands
gen-inferred.test.ts
fixtures
schemas
app.bsky.actor.profile.json
app.bsky.feed.post.json
app.bsky.feed.searchPosts.json
+360
packages/cli/tests/commands/gen-inferred.test.ts
···
import { afterEach, beforeEach, describe, expect, test } from "vitest";
import { mkdir, mkdtemp, readdir, readFile, rm, writeFile } from "node:fs/promises";
import { tmpdir } from "node:os";
import { join } from "node:path";
import { genInferred } from "../../src/commands/gen-inferred.ts";
7
7
+
describe("genInferred", () => {
8
8
+
let testDir: string;
9
9
+
let outDir: string;
10
10
+
let schemasDir: string;
11
11
+
12
12
+
beforeEach(async () => {
13
13
+
// Create a temporary directory for test files
14
14
+
testDir = join(tmpdir(), `prototypey-inferred-test-${Date.now()}`);
15
15
+
outDir = join(testDir, "output");
16
16
+
schemasDir = join(testDir, "schemas");
17
17
+
await mkdir(testDir, { recursive: true });
18
18
+
await mkdir(outDir, { recursive: true });
19
19
+
await mkdir(schemasDir, { recursive: true });
20
20
+
});
21
21
+
22
22
+
afterEach(async () => {
23
23
+
// Clean up test directory
24
24
+
await rm(testDir, { recursive: true, force: true });
25
25
+
});
26
26
+
27
27
+
test("generates inferred types from a simple schema", async () => {
28
28
+
// Create a test schema file
29
29
+
const schemaFile = join(schemasDir, "app.bsky.actor.profile.json");
30
30
+
await writeFile(
31
31
+
schemaFile,
32
32
+
JSON.stringify(
33
33
+
{
34
34
+
lexicon: 1,
35
35
+
id: "app.bsky.actor.profile",
36
36
+
defs: {
37
37
+
main: {
38
38
+
type: "record",
39
39
+
key: "self",
40
40
+
record: {
41
41
+
type: "object",
42
42
+
properties: {
43
43
+
displayName: {
44
44
+
type: "string",
45
45
+
maxLength: 64,
46
46
+
maxGraphemes: 64,
47
47
+
},
48
48
+
description: {
49
49
+
type: "string",
50
50
+
maxLength: 256,
51
51
+
maxGraphemes: 256,
52
52
+
},
53
53
+
},
54
54
+
},
55
55
+
},
56
56
+
},
57
57
+
},
58
58
+
null,
59
59
+
"\t",
60
60
+
),
61
61
+
);
62
62
+
63
63
+
// Run the inferred command
64
64
+
await genInferred(outDir, schemaFile);
65
65
+
66
66
+
// Read the generated TypeScript file
67
67
+
const outputFile = join(outDir, "app/bsky/actor/profile.ts");
68
68
+
const content = await readFile(outputFile, "utf-8");
69
69
+
70
70
+
// Verify the generated code structure
71
71
+
expect(content).toContain("// Generated by prototypey - DO NOT EDIT");
72
72
+
expect(content).toContain("// Source: app.bsky.actor.profile");
73
73
+
expect(content).toContain('import type { Infer } from "prototypey"');
74
74
+
expect(content).toContain('with { type: "json" }');
75
75
+
expect(content).toContain("export type Profile = Infer<typeof schema>");
76
76
+
expect(content).toContain("export const ProfileSchema = schema");
77
77
+
expect(content).toContain("export function isProfile(v: unknown): v is Profile");
78
78
+
expect(content).toContain('v.$type === "app.bsky.actor.profile"');
79
79
+
});
80
80
+
81
81
+
test("generates correct directory structure from NSID", async () => {
82
82
+
// Create a test schema with nested NSID
83
83
+
const schemaFile = join(schemasDir, "app.bsky.feed.post.json");
84
84
+
await writeFile(
85
85
+
schemaFile,
86
86
+
JSON.stringify({
87
87
+
lexicon: 1,
88
88
+
id: "app.bsky.feed.post",
89
89
+
defs: {
90
90
+
main: {
91
91
+
type: "record",
92
92
+
key: "tid",
93
93
+
record: {
94
94
+
type: "object",
95
95
+
properties: {
96
96
+
text: { type: "string" },
97
97
+
},
98
98
+
},
99
99
+
},
100
100
+
},
101
101
+
}),
102
102
+
);
103
103
+
104
104
+
await genInferred(outDir, schemaFile);
105
105
+
106
106
+
// Verify the directory structure matches NSID
107
107
+
const outputFile = join(outDir, "app/bsky/feed/post.ts");
108
108
+
const content = await readFile(outputFile, "utf-8");
109
109
+
110
110
+
expect(content).toBeTruthy();
111
111
+
expect(content).toContain("export type Post = Infer<typeof schema>");
112
112
+
});
113
113
+
114
114
+
test("handles multiple schema files with glob patterns", async () => {
115
115
+
// Create multiple schema files
116
116
+
await writeFile(
117
117
+
join(schemasDir, "app.bsky.actor.profile.json"),
118
118
+
JSON.stringify({
119
119
+
lexicon: 1,
120
120
+
id: "app.bsky.actor.profile",
121
121
+
defs: { main: { type: "record" } },
122
122
+
}),
123
123
+
);
124
124
+
125
125
+
await writeFile(
126
126
+
join(schemasDir, "app.bsky.feed.post.json"),
127
127
+
JSON.stringify({
128
128
+
lexicon: 1,
129
129
+
id: "app.bsky.feed.post",
130
130
+
defs: { main: { type: "record" } },
131
131
+
}),
132
132
+
);
133
133
+
134
134
+
// Run with glob pattern
135
135
+
await genInferred(outDir, `${schemasDir}/*.json`);
136
136
+
137
137
+
// Verify both files were created
138
138
+
const profileContent = await readFile(
139
139
+
join(outDir, "app/bsky/actor/profile.ts"),
140
140
+
"utf-8",
141
141
+
);
142
142
+
const postContent = await readFile(
143
143
+
join(outDir, "app/bsky/feed/post.ts"),
144
144
+
"utf-8",
145
145
+
);
146
146
+
147
147
+
expect(profileContent).toContain("export type Profile");
148
148
+
expect(postContent).toContain("export type Post");
149
149
+
});
150
150
+
151
151
+
test("generates correct relative import path", async () => {
152
152
+
// Create a deeply nested schema
153
153
+
const schemaFile = join(schemasDir, "com.atproto.repo.createRecord.json");
154
154
+
await writeFile(
155
155
+
schemaFile,
156
156
+
JSON.stringify({
157
157
+
lexicon: 1,
158
158
+
id: "com.atproto.repo.createRecord",
159
159
+
defs: {
160
160
+
main: {
161
161
+
type: "procedure",
162
162
+
input: { encoding: "application/json" },
163
163
+
},
164
164
+
},
165
165
+
}),
166
166
+
);
167
167
+
168
168
+
await genInferred(outDir, schemaFile);
169
169
+
170
170
+
// Read generated file and check the import path is relative
171
171
+
const outputFile = join(outDir, "com/atproto/repo/createRecord.ts");
172
172
+
const content = await readFile(outputFile, "utf-8");
173
173
+
174
174
+
// The import should be relative to the generated file location
175
175
+
expect(content).toContain('import schema from "');
176
176
+
expect(content).toContain('.json" with { type: "json" }');
177
177
+
// Should navigate up from com/atproto/repo/ to schemas/
178
178
+
expect(content).toMatch(/import schema from ".*createRecord\.json"/);
179
179
+
});
180
180
+
181
181
+
test("generates proper type name from NSID", async () => {
182
182
+
// Test various NSID formats
183
183
+
const testCases = [
184
184
+
{ id: "app.bsky.feed.post", expectedType: "Post" },
185
185
+
{ id: "com.atproto.repo.createRecord", expectedType: "CreateRecord" },
186
186
+
{ id: "app.bsky.actor.profile", expectedType: "Profile" },
187
187
+
{
188
188
+
id: "app.bsky.feed.searchPosts",
189
189
+
expectedType: "SearchPosts",
190
190
+
},
191
191
+
];
192
192
+
193
193
+
for (const { id, expectedType } of testCases) {
194
194
+
const schemaFile = join(schemasDir, `${id}.json`);
195
195
+
await writeFile(
196
196
+
schemaFile,
197
197
+
JSON.stringify({
198
198
+
lexicon: 1,
199
199
+
id,
200
200
+
defs: { main: { type: "record" } },
201
201
+
}),
202
202
+
);
203
203
+
204
204
+
const testOutDir = join(testDir, `out-${id}`);
205
205
+
await mkdir(testOutDir, { recursive: true });
206
206
+
await genInferred(testOutDir, schemaFile);
207
207
+
208
208
+
const nsidParts = id.split(".");
209
209
+
const outputFile = join(testOutDir, ...nsidParts) + ".ts";
210
210
+
const content = await readFile(outputFile, "utf-8");
211
211
+
212
212
+
expect(content).toContain(`export type ${expectedType}`);
213
213
+
expect(content).toContain(`export const ${expectedType}Schema`);
214
214
+
expect(content).toContain(`export function is${expectedType}`);
215
215
+
}
216
216
+
});
217
217
+
218
218
+
test("handles schema without id gracefully", async () => {
219
219
+
// Create an invalid schema without id
220
220
+
const schemaFile = join(schemasDir, "invalid.json");
221
221
+
await writeFile(
222
222
+
schemaFile,
223
223
+
JSON.stringify({
224
224
+
lexicon: 1,
225
225
+
defs: { main: { type: "record" } },
226
226
+
}),
227
227
+
);
228
228
+
229
229
+
// Should not throw, but should skip the file
230
230
+
await expect(genInferred(outDir, schemaFile)).resolves.not.toThrow();
231
231
+
232
232
+
// Output directory should be empty or not contain generated files
233
233
+
const files = await readFile(outDir, "utf-8").catch(() => null);
234
234
+
expect(files).toBeNull();
235
235
+
});
236
236
+
237
237
+
test("handles schema without defs gracefully", async () => {
238
238
+
// Create an invalid schema without defs
239
239
+
const schemaFile = join(schemasDir, "invalid2.json");
240
240
+
await writeFile(
241
241
+
schemaFile,
242
242
+
JSON.stringify({
243
243
+
lexicon: 1,
244
244
+
id: "app.test.invalid",
245
245
+
}),
246
246
+
);
247
247
+
248
248
+
// Should not throw, but should skip the file
249
249
+
await expect(genInferred(outDir, schemaFile)).resolves.not.toThrow();
250
250
+
});
251
251
+
252
252
+
test("processes array of schema patterns", async () => {
253
253
+
// Create schemas in different directories
254
254
+
const schemasDir1 = join(testDir, "schemas1");
255
255
+
const schemasDir2 = join(testDir, "schemas2");
256
256
+
await mkdir(schemasDir1, { recursive: true });
257
257
+
await mkdir(schemasDir2, { recursive: true });
258
258
+
259
259
+
await writeFile(
260
260
+
join(schemasDir1, "app.one.json"),
261
261
+
JSON.stringify({
262
262
+
lexicon: 1,
263
263
+
id: "app.one",
264
264
+
defs: { main: { type: "record" } },
265
265
+
}),
266
266
+
);
267
267
+
268
268
+
await writeFile(
269
269
+
join(schemasDir2, "app.two.json"),
270
270
+
JSON.stringify({
271
271
+
lexicon: 1,
272
272
+
id: "app.two",
273
273
+
defs: { main: { type: "record" } },
274
274
+
}),
275
275
+
);
276
276
+
277
277
+
// Run with array of patterns
278
278
+
await genInferred(outDir, [`${schemasDir1}/*.json`, `${schemasDir2}/*.json`]);
279
279
+
280
280
+
// Verify both were generated
281
281
+
const oneContent = await readFile(join(outDir, "app/one.ts"), "utf-8");
282
282
+
const twoContent = await readFile(join(outDir, "app/two.ts"), "utf-8");
283
283
+
284
284
+
expect(oneContent).toContain("export type One");
285
285
+
expect(twoContent).toContain("export type Two");
286
286
+
});
287
287
+
288
288
+
test("generates code with all required components", async () => {
289
289
+
// Create a comprehensive schema
290
290
+
const schemaFile = join(schemasDir, "app.test.complete.json");
291
291
+
await writeFile(
292
292
+
schemaFile,
293
293
+
JSON.stringify({
294
294
+
lexicon: 1,
295
295
+
id: "app.test.complete",
296
296
+
defs: {
297
297
+
main: {
298
298
+
type: "record",
299
299
+
key: "tid",
300
300
+
record: {
301
301
+
type: "object",
302
302
+
required: ["text"],
303
303
+
properties: {
304
304
+
text: { type: "string", maxLength: 300 },
305
305
+
tags: { type: "array", items: { type: "string" } },
306
306
+
},
307
307
+
},
308
308
+
},
309
309
+
},
310
310
+
}),
311
311
+
);
312
312
+
313
313
+
await genInferred(outDir, schemaFile);
314
314
+
315
315
+
const outputFile = join(outDir, "app/test/complete.ts");
316
316
+
const content = await readFile(outputFile, "utf-8");
317
317
+
318
318
+
// Check all required exports
319
319
+
expect(content).toContain('import type { Infer } from "prototypey"');
320
320
+
expect(content).toContain("export type Complete = Infer<typeof schema>");
321
321
+
expect(content).toContain("export const CompleteSchema = schema");
322
322
+
expect(content).toContain("export function isComplete(v: unknown): v is Complete");
323
323
+
324
324
+
// Check type guard implementation
325
325
+
expect(content).toContain('typeof v === "object"');
326
326
+
expect(content).toContain("v !== null");
327
327
+
expect(content).toContain('"$type" in v');
328
328
+
expect(content).toContain('v.$type === "app.test.complete"');
329
329
+
330
330
+
// Check comments
331
331
+
expect(content).toContain("// Generated by prototypey - DO NOT EDIT");
332
332
+
expect(content).toContain("// Source: app.test.complete");
333
333
+
expect(content).toContain("* Type-inferred from lexicon schema: app.test.complete");
334
334
+
expect(content).toContain("* The lexicon schema object");
335
335
+
expect(content).toContain("* Type guard to check if a value is a Complete");
336
336
+
});
337
337
+
338
338
+
test("handles kebab-case and mixed-case NSID parts", async () => {
339
339
+
// Test NSID with different casing
340
340
+
const schemaFile = join(schemasDir, "app.test.myCustomType.json");
341
341
+
await writeFile(
342
342
+
schemaFile,
343
343
+
JSON.stringify({
344
344
+
lexicon: 1,
345
345
+
id: "app.test.myCustomType",
346
346
+
defs: { main: { type: "record" } },
347
347
+
}),
348
348
+
);
349
349
+
350
350
+
await genInferred(outDir, schemaFile);
351
351
+
352
352
+
const outputFile = join(outDir, "app/test/myCustomType.ts");
353
353
+
const content = await readFile(outputFile, "utf-8");
354
354
+
355
355
+
// Should convert to PascalCase
356
356
+
expect(content).toContain("export type MyCustomType");
357
357
+
expect(content).toContain("export const MyCustomTypeSchema");
358
358
+
expect(content).toContain("export function isMyCustomType");
359
359
+
});
360
360
+
});
+30
packages/cli/tests/fixtures/schemas/app.bsky.actor.profile.json
···
1
1
+
{
2
2
+
"lexicon": 1,
3
3
+
"id": "app.bsky.actor.profile",
4
4
+
"defs": {
5
5
+
"main": {
6
6
+
"type": "record",
7
7
+
"key": "self",
8
8
+
"record": {
9
9
+
"type": "object",
10
10
+
"properties": {
11
11
+
"displayName": {
12
12
+
"type": "string",
13
13
+
"maxLength": 64,
14
14
+
"maxGraphemes": 64
15
15
+
},
16
16
+
"description": {
17
17
+
"type": "string",
18
18
+
"maxLength": 256,
19
19
+
"maxGraphemes": 256
20
20
+
},
21
21
+
"avatar": {
22
22
+
"type": "blob",
23
23
+
"accept": ["image/png", "image/jpeg"],
24
24
+
"maxSize": 1000000
25
25
+
}
26
26
+
}
27
27
+
}
28
28
+
}
29
29
+
}
30
30
+
}
+43
packages/cli/tests/fixtures/schemas/app.bsky.feed.post.json
···
1
1
+
{
2
2
+
"lexicon": 1,
3
3
+
"id": "app.bsky.feed.post",
4
4
+
"defs": {
5
5
+
"main": {
6
6
+
"type": "record",
7
7
+
"key": "tid",
8
8
+
"record": {
9
9
+
"type": "object",
10
10
+
"required": ["text", "createdAt"],
11
11
+
"properties": {
12
12
+
"text": {
13
13
+
"type": "string",
14
14
+
"maxLength": 300,
15
15
+
"maxGraphemes": 300
16
16
+
},
17
17
+
"createdAt": {
18
18
+
"type": "string",
19
19
+
"format": "datetime"
20
20
+
},
21
21
+
"reply": {
22
22
+
"type": "ref",
23
23
+
"ref": "app.bsky.feed.post#replyRef"
24
24
+
}
25
25
+
}
26
26
+
}
27
27
+
},
28
28
+
"replyRef": {
29
29
+
"type": "object",
30
30
+
"required": ["root", "parent"],
31
31
+
"properties": {
32
32
+
"root": {
33
33
+
"type": "ref",
34
34
+
"ref": "com.atproto.repo.strongRef"
35
35
+
},
36
36
+
"parent": {
37
37
+
"type": "ref",
38
38
+
"ref": "com.atproto.repo.strongRef"
39
39
+
}
40
40
+
}
41
41
+
}
42
42
+
}
43
43
+
}
+47
packages/cli/tests/fixtures/schemas/app.bsky.feed.searchPosts.json
···
1
1
+
{
2
2
+
"lexicon": 1,
3
3
+
"id": "app.bsky.feed.searchPosts",
4
4
+
"defs": {
5
5
+
"main": {
6
6
+
"type": "query",
7
7
+
"description": "Find posts matching search criteria",
8
8
+
"parameters": {
9
9
+
"type": "params",
10
10
+
"required": ["q"],
11
11
+
"properties": {
12
12
+
"q": {
13
13
+
"type": "string"
14
14
+
},
15
15
+
"limit": {
16
16
+
"type": "integer",
17
17
+
"minimum": 1,
18
18
+
"maximum": 100,
19
19
+
"default": 25
20
20
+
},
21
21
+
"cursor": {
22
22
+
"type": "string"
23
23
+
}
24
24
+
}
25
25
+
},
26
26
+
"output": {
27
27
+
"encoding": "application/json",
28
28
+
"schema": {
29
29
+
"type": "object",
30
30
+
"required": ["posts"],
31
31
+
"properties": {
32
32
+
"cursor": {
33
33
+
"type": "string"
34
34
+
},
35
35
+
"posts": {
36
36
+
"type": "array",
37
37
+
"items": {
38
38
+
"type": "ref",
39
39
+
"ref": "app.bsky.feed.defs#postView"
40
40
+
}
41
41
+
}
42
42
+
}
43
43
+
}
44
44
+
}
45
45
+
}
46
46
+
}
47
47
+
}