Auto-indexing service and GraphQL API for AT Protocol Records

fix: use all lexicons for ref validation in create/update mutations

Previously, mutation validation only used the lexicon being created/updated
to validate ref fields. This failed when refs pointed to other lexicon types.
Now passes all registered lexicons to validation functions so refs can be
properly validated against their target types.

+722 -169
+389
dev-docs/plans/2025-12-09-fix-lexicon-ref-validation.md
··· 1 + # Fix Lexicon Ref Validation in Mutations Implementation Plan 2 + 3 + > **For Claude:** REQUIRED SUB-SKILL: Use superpowers:executing-plans to implement this plan task-by-task. 4 + 5 + **Goal:** Fix GraphQL mutation validation to include all lexicons so refs can be resolved by honk. 6 + 7 + **Architecture:** The mutation resolvers currently pass only the target collection's lexicon to honk for validation. When a lexicon has `ref` fields pointing to other lexicons (e.g., `org.atmosphereconf.profile` references `community.lexicon.location.hthree`), honk fails with "Referenced lexicon not found". The fix is to fetch all lexicons and pass them to `honk.validate_record()`. 8 + 9 + **Tech Stack:** Gleam, honk (lexicon validation library), SQLite 10 + 11 + --- 12 + 13 + ### Task 1: Write failing test for ref validation 14 + 15 + **Files:** 16 + - Modify: `server/test/mutation_resolver_integration_test.gleam` 17 + 18 + **Step 1: Add test helper for lexicon with ref** 19 + 20 + Add after the existing `create_status_lexicon()` function (around line 72): 21 + 22 + ```gleam 23 + // Helper to create a location lexicon (referenced by profile) 24 + fn create_location_lexicon() -> String { 25 + json.object([ 26 + #("lexicon", json.int(1)), 27 + #("id", json.string("community.lexicon.location.hthree")), 28 + #( 29 + "defs", 30 + json.object([ 31 + #( 32 + "main", 33 + json.object([ 34 + #("type", json.string("object")), 35 + #("description", json.string("A physical location in H3 format")), 36 + #( 37 + "properties", 38 + json.object([ 39 + #( 40 + "name", 41 + json.object([ 42 + #("type", json.string("string")), 43 + ]), 44 + ), 45 + #( 46 + "value", 47 + json.object([ 48 + #("type", json.string("string")), 49 + ]), 50 + ), 51 + ]), 52 + ), 53 + #("required", json.array([json.string("value")], of: fn(x) { x })), 54 + ]), 55 + ), 56 + ]), 57 + ), 58 + ]) 59 + |> json.to_string 60 + } 61 + 62 + // Helper to create a profile lexicon that references location 63 + fn 
create_profile_with_ref_lexicon() -> String { 64 + json.object([ 65 + #("lexicon", json.int(1)), 66 + #("id", json.string("org.atmosphereconf.profile")), 67 + #( 68 + "defs", 69 + json.object([ 70 + #( 71 + "main", 72 + json.object([ 73 + #("type", json.string("record")), 74 + #("key", json.string("literal:self")), 75 + #( 76 + "record", 77 + json.object([ 78 + #("type", json.string("object")), 79 + #( 80 + "properties", 81 + json.object([ 82 + #( 83 + "displayName", 84 + json.object([ 85 + #("type", json.string("string")), 86 + ]), 87 + ), 88 + #( 89 + "homeTown", 90 + json.object([ 91 + #("type", json.string("ref")), 92 + #("ref", json.string("community.lexicon.location.hthree")), 93 + ]), 94 + ), 95 + #( 96 + "createdAt", 97 + json.object([ 98 + #("type", json.string("string")), 99 + #("format", json.string("datetime")), 100 + ]), 101 + ), 102 + ]), 103 + ), 104 + ]), 105 + ), 106 + ]), 107 + ), 108 + ]), 109 + ), 110 + ]) 111 + |> json.to_string 112 + } 113 + ``` 114 + 115 + **Step 2: Add the failing test** 116 + 117 + Add at the end of the test file (before the final closing): 118 + 119 + ```gleam 120 + // Test: Mutation with lexicon ref should validate correctly 121 + pub fn mutation_with_lexicon_ref_validates_test() { 122 + // Setup: Create in-memory database with both lexicons 123 + let assert Ok(db) = sqlight.open(":memory:") 124 + let assert Ok(_) = tables.create_lexicon_table(db) 125 + 126 + // Insert BOTH lexicons - the profile AND the referenced location 127 + let location_lexicon = create_location_lexicon() 128 + let assert Ok(_) = 129 + lexicons.insert(db, "community.lexicon.location.hthree", location_lexicon) 130 + 131 + let profile_lexicon = create_profile_with_ref_lexicon() 132 + let assert Ok(_) = 133 + lexicons.insert(db, "org.atmosphereconf.profile", profile_lexicon) 134 + 135 + let assert Ok(lexicon_records) = lexicons.get_all(db) 136 + let parsed_lexicons = 137 + lexicon_records 138 + |> list.filter_map(fn(lex) { 
lexicon_graphql.parse_lexicon(lex.json) }) 139 + 140 + let empty_fetcher = fn(_collection, _params) { 141 + Ok(#([], option.None, False, False, option.None)) 142 + } 143 + 144 + // Mock resolver that simulates successful validation and record creation 145 + let mock_create_resolver_factory = fn(_collection: String) { 146 + fn(_ctx: schema.Context) { 147 + Ok( 148 + value.Object([ 149 + #("uri", value.String("at://did:plc:test/org.atmosphereconf.profile/self")), 150 + #("cid", value.String("bafyreimock")), 151 + #("did", value.String("did:plc:test")), 152 + #("collection", value.String("org.atmosphereconf.profile")), 153 + #("indexedAt", value.String("2024-01-01T00:00:00Z")), 154 + ]), 155 + ) 156 + } 157 + } 158 + 159 + let create_factory = option.Some(mock_create_resolver_factory) 160 + 161 + let assert Ok(built_schema) = 162 + database.build_schema_with_fetcher( 163 + parsed_lexicons, 164 + empty_fetcher, 165 + option.None, 166 + option.None, 167 + create_factory, 168 + option.None, 169 + option.None, 170 + option.None, 171 + ) 172 + 173 + // Execute mutation with homeTown field that uses ref type 174 + let mutation = 175 + "mutation { createOrgAtmosphereconfProfile(rkey: \"self\", input: { displayName: \"Test\", homeTown: { name: \"Portland\", value: \"8528f003fffffff\" }, createdAt: \"2024-01-01T00:00:00Z\" }) { uri } }" 176 + 177 + let ctx_data = value.Object([#("auth_token", value.String("mock_token_123"))]) 178 + let ctx = schema.context(option.Some(ctx_data)) 179 + let assert Ok(response) = executor.execute(mutation, built_schema, ctx) 180 + 181 + // Should have no errors - ref should be resolved correctly 182 + response.errors 183 + |> list.length 184 + |> should.equal(0) 185 + } 186 + ``` 187 + 188 + **Step 3: Run the test** 189 + 190 + Run: `cd /Users/chadmiller/code/quickslice/server && gleam test -- --only mutation_with_lexicon_ref_validates_test` 191 + 192 + Expected: Test should pass at GraphQL level (mock resolver), but this test
validates the schema builds correctly with refs. 193 + 194 + **Step 4: Commit** 195 + 196 + ```bash 197 + git add server/test/mutation_resolver_integration_test.gleam 198 + git commit -m "test: add test for lexicon ref validation in mutations" 199 + ``` 200 + 201 + --- 202 + 203 + ### Task 2: Fix create mutation to use all lexicons 204 + 205 + **Files:** 206 + - Modify: `server/src/mutation_resolvers.gleam:158-179` 207 + 208 + **Step 1: Update the create resolver validation logic** 209 + 210 + Replace lines 158-179 (the validation section) with: 211 + 212 + ```gleam 213 + // Step 6: Validate against lexicon (fetch all lexicons to resolve refs) 214 + use all_lexicon_records <- result.try( 215 + lexicons.get_all(ctx.db) 216 + |> result.map_error(fn(_) { "Failed to fetch lexicons" }), 217 + ) 218 + 219 + // Parse all lexicon JSON strings 220 + use all_lex_jsons <- result.try( 221 + all_lexicon_records 222 + |> list.try_map(fn(lex) { 223 + honk.parse_json_string(lex.json) 224 + |> result.map_error(fn(e) { 225 + "Failed to parse lexicon JSON: " <> errors.to_string(e) 226 + }) 227 + }), 228 + ) 229 + 230 + // Check that the target collection lexicon exists (early exit with a clear error) 231 + use _ <- result.try( 232 + list.find(all_lexicon_records, fn(lex) { lex.id == collection }) 233 + |> result.map_error(fn(_) { 234 + "Lexicon not found for collection: " <> collection 235 + }), 236 + ) 237 + 238 + use _ <- result.try( 239 + honk.validate_record(all_lex_jsons, collection, record_json_value) 240 + |> result.map_error(fn(err) { 241 + "Validation failed: " <> errors.to_string(err) 242 + }), 243 + ) 244 + 245 + { 246 + ``` 247 + 248 + Note: The opening `{` starts a new block since we removed the `case lexicon_records {` and `[lex, ..] -> {` wrapper.
249 + 250 + **Step 2: Find and remove the closing braces** 251 + 252 + Around line 262 (after the original code), find and remove: 253 + 254 + ```gleam 255 + [] -> Error("Lexicon not found for collection: " <> collection) 256 + } 257 + ``` 258 + 259 + **Step 3: Run tests** 260 + 261 + Run: `cd /Users/chadmiller/code/quickslice/server && gleam test` 262 + 263 + Expected: All tests pass 264 + 265 + **Step 4: Commit** 266 + 267 + ```bash 268 + git add server/src/mutation_resolvers.gleam 269 + git commit -m "fix: use all lexicons for create mutation validation 270 + 271 + Fixes 'Referenced lexicon not found' error when validating records 272 + with ref fields pointing to other lexicons." 273 + ``` 274 + 275 + --- 276 + 277 + ### Task 3: Fix update mutation to use all lexicons 278 + 279 + **Files:** 280 + - Modify: `server/src/mutation_resolvers.gleam:368-389` (line numbers will shift after Task 2) 281 + 282 + **Step 1: Update the update resolver validation logic** 283 + 284 + Apply the same pattern as Task 2 to the update mutation. 
Replace the validation section: 285 + 286 + ```gleam 287 + // Step 6: Validate against lexicon (fetch all lexicons to resolve refs) 288 + use all_lexicon_records <- result.try( 289 + lexicons.get_all(ctx.db) 290 + |> result.map_error(fn(_) { "Failed to fetch lexicons" }), 291 + ) 292 + 293 + // Parse all lexicon JSON strings 294 + use all_lex_jsons <- result.try( 295 + all_lexicon_records 296 + |> list.try_map(fn(lex) { 297 + honk.parse_json_string(lex.json) 298 + |> result.map_error(fn(e) { 299 + "Failed to parse lexicon JSON: " <> errors.to_string(e) 300 + }) 301 + }), 302 + ) 303 + 304 + use _ <- result.try( 305 + honk.validate_record(all_lex_jsons, collection, record_json_value) 306 + |> result.map_error(fn(err) { 307 + "Validation failed: " <> errors.to_string(err) 308 + }), 309 + ) 310 + 311 + { 312 + ``` 313 + 314 + **Step 2: Remove the closing braces for the old case statement** 315 + 316 + Find and remove around line 453 (adjusted): 317 + 318 + ```gleam 319 + [] -> Error("Lexicon not found for collection: " <> collection) 320 + } 321 + ``` 322 + 323 + **Step 3: Run tests** 324 + 325 + Run: `cd /Users/chadmiller/code/quickslice/server && gleam test` 326 + 327 + Expected: All tests pass 328 + 329 + **Step 4: Commit** 330 + 331 + ```bash 332 + git add server/src/mutation_resolvers.gleam 333 + git commit -m "fix: use all lexicons for update mutation validation" 334 + ``` 335 + 336 + --- 337 + 338 + ### Task 4: Verify fix with manual test 339 + 340 + **Step 1: Build and run the server** 341 + 342 + Run: `cd /Users/chadmiller/code/quickslice/server && gleam build` 343 + 344 + Expected: Build succeeds with no errors 345 + 346 + **Step 2: Test the original failing mutation** 347 + 348 + If you have a running instance, test the original payload: 349 + 350 + ```json 351 + { 352 + "query": "mutation ProfileSettingsPageUpdateProfileMutation($input: OrgAtmosphereconfProfileInput!) 
{ updateOrgAtmosphereconfProfile(rkey: \"self\", input: $input) { uri } }", 353 + "variables": { 354 + "input": { 355 + "displayName": "Chad", 356 + "description": "software engineer", 357 + "createdAt": "2025-12-10T05:33:33.170Z", 358 + "homeTown": { 359 + "name": "Portland, Oregon, United States", 360 + "value": "8528f003fffffff" 361 + } 362 + } 363 + } 364 + } 365 + ``` 366 + 367 + Expected: No "Referenced lexicon not found" error 368 + 369 + **Step 3: Final commit** 370 + 371 + ```bash 372 + git add -A 373 + git commit -m "chore: verify lexicon ref validation fix" 374 + ``` 375 + 376 + --- 377 + 378 + ## Summary 379 + 380 + | Task | Description | Files | 381 + |------|-------------|-------| 382 + | 1 | Add test for lexicon ref validation | `server/test/mutation_resolver_integration_test.gleam` | 383 + | 2 | Fix create mutation validation | `server/src/mutation_resolvers.gleam` | 384 + | 3 | Fix update mutation validation | `server/src/mutation_resolvers.gleam` | 385 + | 4 | Verify fix works | Manual testing | 386 + 387 + **Root Cause:** `honk.validate_record([lex_json], ...)` only received the target collection's lexicon, but lexicons with `ref` fields need all referenced lexicons to be present for resolution. 388 + 389 + **Fix:** Change to `honk.validate_record(all_lex_jsons, ...)` where `all_lex_jsons` contains all lexicons from the database.
+169 -169
server/src/mutation_resolvers.gleam
··· 155 155 let record_json_value = graphql_value_to_json_value(input) 156 156 let record_json_string = json.to_string(record_json_value) 157 157 158 - // Step 6: Validate against lexicon 159 - use lexicon_records <- result.try( 160 - lexicons.get(ctx.db, collection) 161 - |> result.map_error(fn(_) { "Failed to fetch lexicon" }), 158 + // Step 6: Validate against lexicon (fetch all lexicons to resolve refs) 159 + use all_lexicon_records <- result.try( 160 + lexicons.get_all(ctx.db) 161 + |> result.map_error(fn(_) { "Failed to fetch lexicons" }), 162 162 ) 163 163 164 - case lexicon_records { 165 - [lex, ..] -> { 166 - // Parse lexicon JSON string to Json 167 - use lex_json <- result.try( 168 - honk.parse_json_string(lex.json) 169 - |> result.map_error(fn(e) { 170 - "Failed to parse lexicon JSON: " <> errors.to_string(e) 171 - }), 172 - ) 164 + // Parse all lexicon JSON strings 165 + use all_lex_jsons <- result.try( 166 + all_lexicon_records 167 + |> list.try_map(fn(lex) { 168 + honk.parse_json_string(lex.json) 169 + |> result.map_error(fn(e) { 170 + "Failed to parse lexicon JSON: " <> errors.to_string(e) 171 + }) 172 + }), 173 + ) 173 174 174 - use _ <- result.try( 175 - honk.validate_record([lex_json], collection, record_json_value) 176 - |> result.map_error(fn(err) { 177 - "Validation failed: " <> errors.to_string(err) 178 - }), 179 - ) 175 + use _ <- result.try( 176 + honk.validate_record(all_lex_jsons, collection, record_json_value) 177 + |> result.map_error(fn(err) { 178 + "Validation failed: " <> errors.to_string(err) 179 + }), 180 + ) 180 181 181 - // Step 7: Call createRecord via AT Protocol 182 - // Omit rkey field when not provided to let PDS auto-generate TID 183 - let create_body = 184 - case rkey { 185 - option.Some(r) -> 186 - json.object([ 187 - #("repo", json.string(user_info.did)), 188 - #("collection", json.string(collection)), 189 - #("rkey", json.string(r)), 190 - #("record", graphql_value_to_json_value(input)), 191 - ]) 192 - option.None -> 193 
- json.object([ 194 - #("repo", json.string(user_info.did)), 195 - #("collection", json.string(collection)), 196 - #("record", graphql_value_to_json_value(input)), 197 - ]) 198 - } 199 - |> json.to_string 182 + { 183 + // Step 7: Call createRecord via AT Protocol 184 + // Omit rkey field when not provided to let PDS auto-generate TID 185 + let create_body = 186 + case rkey { 187 + option.Some(r) -> 188 + json.object([ 189 + #("repo", json.string(user_info.did)), 190 + #("collection", json.string(collection)), 191 + #("rkey", json.string(r)), 192 + #("record", graphql_value_to_json_value(input)), 193 + ]) 194 + option.None -> 195 + json.object([ 196 + #("repo", json.string(user_info.did)), 197 + #("collection", json.string(collection)), 198 + #("record", graphql_value_to_json_value(input)), 199 + ]) 200 + } 201 + |> json.to_string 200 202 201 - let pds_url = 202 - session.pds_endpoint <> "/xrpc/com.atproto.repo.createRecord" 203 + let pds_url = 204 + session.pds_endpoint <> "/xrpc/com.atproto.repo.createRecord" 203 205 204 - use response <- result.try( 205 - dpop.make_dpop_request("POST", pds_url, session, create_body) 206 - |> result.map_error(fn(_) { "Failed to create record on PDS" }), 207 - ) 206 + use response <- result.try( 207 + dpop.make_dpop_request("POST", pds_url, session, create_body) 208 + |> result.map_error(fn(_) { "Failed to create record on PDS" }), 209 + ) 208 210 209 - // Step 8: Check HTTP status and parse response 210 - use #(uri, cid) <- result.try(case response.status { 211 - 200 | 201 -> { 212 - // Parse successful response 213 - let response_decoder = { 214 - use uri <- decode.field("uri", decode.string) 215 - use cid <- decode.field("cid", decode.string) 216 - decode.success(#(uri, cid)) 217 - } 218 - 219 - json.parse(response.body, response_decoder) 220 - |> result.map_error(fn(_) { 221 - "Failed to parse PDS success response. 
Body: " <> response.body 222 - }) 223 - } 224 - _ -> { 225 - // Return actual PDS error 226 - Error( 227 - "PDS request failed with status " 228 - <> int.to_string(response.status) 229 - <> ": " 230 - <> response.body, 231 - ) 211 + // Step 8: Check HTTP status and parse response 212 + use #(uri, cid) <- result.try(case response.status { 213 + 200 | 201 -> { 214 + // Parse successful response 215 + let response_decoder = { 216 + use uri <- decode.field("uri", decode.string) 217 + use cid <- decode.field("cid", decode.string) 218 + decode.success(#(uri, cid)) 232 219 } 233 - }) 234 220 235 - // Step 9: Index the created record in the database 236 - use _ <- result.try( 237 - records.insert( 238 - ctx.db, 239 - uri, 240 - cid, 241 - user_info.did, 242 - collection, 243 - record_json_string, 221 + json.parse(response.body, response_decoder) 222 + |> result.map_error(fn(_) { 223 + "Failed to parse PDS success response. Body: " <> response.body 224 + }) 225 + } 226 + _ -> { 227 + // Return actual PDS error 228 + Error( 229 + "PDS request failed with status " 230 + <> int.to_string(response.status) 231 + <> ": " 232 + <> response.body, 244 233 ) 245 - |> result.map_error(fn(_) { "Failed to index record in database" }), 246 - ) 234 + } 235 + }) 247 236 248 - // Step 10: Return the created record as a GraphQL value 249 - // The field resolvers expect the record data under "value" key 250 - // (same structure as query results) 251 - Ok( 252 - value.Object([ 253 - #("uri", value.String(uri)), 254 - #("cid", value.String(cid)), 255 - #("did", value.String(user_info.did)), 256 - #("collection", value.String(collection)), 257 - #("indexedAt", value.String("")), 258 - #("value", input), 259 - ]), 237 + // Step 9: Index the created record in the database 238 + use _ <- result.try( 239 + records.insert( 240 + ctx.db, 241 + uri, 242 + cid, 243 + user_info.did, 244 + collection, 245 + record_json_string, 260 246 ) 261 - } 262 - [] -> Error("Lexicon not found for collection: " <> 
collection) 247 + |> result.map_error(fn(_) { "Failed to index record in database" }), 248 + ) 249 + 250 + // Step 10: Return the created record as a GraphQL value 251 + // The field resolvers expect the record data under "value" key 252 + // (same structure as query results) 253 + Ok( 254 + value.Object([ 255 + #("uri", value.String(uri)), 256 + #("cid", value.String(cid)), 257 + #("did", value.String(user_info.did)), 258 + #("collection", value.String(collection)), 259 + #("indexedAt", value.String("")), 260 + #("value", input), 261 + ]), 262 + ) 263 263 } 264 264 } 265 265 } ··· 365 365 let record_json_value = graphql_value_to_json_value(input) 366 366 let record_json_string = json.to_string(record_json_value) 367 367 368 - // Step 6: Validate against lexicon 369 - use lexicon_records <- result.try( 370 - lexicons.get(ctx.db, collection) 371 - |> result.map_error(fn(_) { "Failed to fetch lexicon" }), 368 + // Step 6: Validate against lexicon (fetch all lexicons to resolve refs) 369 + use all_lexicon_records <- result.try( 370 + lexicons.get_all(ctx.db) 371 + |> result.map_error(fn(_) { "Failed to fetch lexicons" }), 372 372 ) 373 373 374 - case lexicon_records { 375 - [lex, ..] 
-> { 376 - // Parse lexicon JSON string to Json 377 - use lex_json <- result.try( 378 - honk.parse_json_string(lex.json) 379 - |> result.map_error(fn(e) { 380 - "Failed to parse lexicon JSON: " <> errors.to_string(e) 381 - }), 382 - ) 383 - 384 - use _ <- result.try( 385 - honk.validate_record([lex_json], collection, record_json_value) 386 - |> result.map_error(fn(err) { 387 - "Validation failed: " <> errors.to_string(err) 388 - }), 389 - ) 374 + // Parse all lexicon JSON strings 375 + use all_lex_jsons <- result.try( 376 + all_lexicon_records 377 + |> list.try_map(fn(lex) { 378 + honk.parse_json_string(lex.json) 379 + |> result.map_error(fn(e) { 380 + "Failed to parse lexicon JSON: " <> errors.to_string(e) 381 + }) 382 + }), 383 + ) 390 384 391 - // Step 7: Call putRecord via AT Protocol 392 - let update_body = 393 - json.object([ 394 - #("repo", json.string(user_info.did)), 395 - #("collection", json.string(collection)), 396 - #("rkey", json.string(rkey)), 397 - #("record", graphql_value_to_json_value(input)), 398 - ]) 399 - |> json.to_string 385 + use _ <- result.try( 386 + honk.validate_record(all_lex_jsons, collection, record_json_value) 387 + |> result.map_error(fn(err) { 388 + "Validation failed: " <> errors.to_string(err) 389 + }), 390 + ) 400 391 401 - let pds_url = session.pds_endpoint <> "/xrpc/com.atproto.repo.putRecord" 392 + { 393 + // Step 7: Call putRecord via AT Protocol 394 + let update_body = 395 + json.object([ 396 + #("repo", json.string(user_info.did)), 397 + #("collection", json.string(collection)), 398 + #("rkey", json.string(rkey)), 399 + #("record", graphql_value_to_json_value(input)), 400 + ]) 401 + |> json.to_string 402 402 403 - use response <- result.try( 404 - dpop.make_dpop_request("POST", pds_url, session, update_body) 405 - |> result.map_error(fn(_) { "Failed to update record on PDS" }), 406 - ) 403 + let pds_url = session.pds_endpoint <> "/xrpc/com.atproto.repo.putRecord" 407 404 408 - // Step 8: Check HTTP status and parse 
response 409 - use #(uri, cid) <- result.try(case response.status { 410 - 200 | 201 -> { 411 - // Parse successful response 412 - let response_decoder = { 413 - use uri <- decode.field("uri", decode.string) 414 - use cid <- decode.field("cid", decode.string) 415 - decode.success(#(uri, cid)) 416 - } 405 + use response <- result.try( 406 + dpop.make_dpop_request("POST", pds_url, session, update_body) 407 + |> result.map_error(fn(_) { "Failed to update record on PDS" }), 408 + ) 417 409 418 - json.parse(response.body, response_decoder) 419 - |> result.map_error(fn(_) { 420 - "Failed to parse PDS success response. Body: " <> response.body 421 - }) 422 - } 423 - _ -> { 424 - // Return actual PDS error 425 - Error( 426 - "PDS request failed with status " 427 - <> int.to_string(response.status) 428 - <> ": " 429 - <> response.body, 430 - ) 410 + // Step 8: Check HTTP status and parse response 411 + use #(uri, cid) <- result.try(case response.status { 412 + 200 | 201 -> { 413 + // Parse successful response 414 + let response_decoder = { 415 + use uri <- decode.field("uri", decode.string) 416 + use cid <- decode.field("cid", decode.string) 417 + decode.success(#(uri, cid)) 431 418 } 432 - }) 433 419 434 - // Step 9: Update the record in the database 435 - use _ <- result.try( 436 - records.update(ctx.db, uri, cid, record_json_string) 437 - |> result.map_error(fn(_) { "Failed to update record in database" }), 438 - ) 420 + json.parse(response.body, response_decoder) 421 + |> result.map_error(fn(_) { 422 + "Failed to parse PDS success response. 
Body: " <> response.body 423 + }) 424 + } 425 + _ -> { 426 + // Return actual PDS error 427 + Error( 428 + "PDS request failed with status " 429 + <> int.to_string(response.status) 430 + <> ": " 431 + <> response.body, 432 + ) 433 + } 434 + }) 439 435 440 - // Step 10: Return the updated record as a GraphQL value 441 - // The field resolvers expect the record data under "value" key 442 - Ok( 443 - value.Object([ 444 - #("uri", value.String(uri)), 445 - #("cid", value.String(cid)), 446 - #("did", value.String(user_info.did)), 447 - #("collection", value.String(collection)), 448 - #("indexedAt", value.String("")), 449 - #("value", input), 450 - ]), 451 - ) 452 - } 453 - [] -> Error("Lexicon not found for collection: " <> collection) 436 + // Step 9: Update the record in the database 437 + use _ <- result.try( 438 + records.update(ctx.db, uri, cid, record_json_string) 439 + |> result.map_error(fn(_) { "Failed to update record in database" }), 440 + ) 441 + 442 + // Step 10: Return the updated record as a GraphQL value 443 + // The field resolvers expect the record data under "value" key 444 + Ok( 445 + value.Object([ 446 + #("uri", value.String(uri)), 447 + #("cid", value.String(cid)), 448 + #("did", value.String(user_info.did)), 449 + #("collection", value.String(collection)), 450 + #("indexedAt", value.String("")), 451 + #("value", input), 452 + ]), 453 + ) 454 454 } 455 455 } 456 456 }
+164
server/test/mutation_resolver_integration_test.gleam
··· 71 71 |> json.to_string 72 72 } 73 73 74 + // Helper to create a location lexicon (referenced by profile) 75 + fn create_location_lexicon() -> String { 76 + json.object([ 77 + #("lexicon", json.int(1)), 78 + #("id", json.string("community.lexicon.location.hthree")), 79 + #( 80 + "defs", 81 + json.object([ 82 + #( 83 + "main", 84 + json.object([ 85 + #("type", json.string("object")), 86 + #("description", json.string("A physical location in H3 format")), 87 + #( 88 + "properties", 89 + json.object([ 90 + #( 91 + "name", 92 + json.object([ 93 + #("type", json.string("string")), 94 + ]), 95 + ), 96 + #( 97 + "value", 98 + json.object([ 99 + #("type", json.string("string")), 100 + ]), 101 + ), 102 + ]), 103 + ), 104 + #("required", json.array([json.string("value")], of: fn(x) { x })), 105 + ]), 106 + ), 107 + ]), 108 + ), 109 + ]) 110 + |> json.to_string 111 + } 112 + 113 + // Helper to create a profile lexicon that references location 114 + fn create_profile_with_ref_lexicon() -> String { 115 + json.object([ 116 + #("lexicon", json.int(1)), 117 + #("id", json.string("org.atmosphereconf.profile")), 118 + #( 119 + "defs", 120 + json.object([ 121 + #( 122 + "main", 123 + json.object([ 124 + #("type", json.string("record")), 125 + #("key", json.string("literal:self")), 126 + #( 127 + "record", 128 + json.object([ 129 + #("type", json.string("object")), 130 + #( 131 + "properties", 132 + json.object([ 133 + #( 134 + "displayName", 135 + json.object([ 136 + #("type", json.string("string")), 137 + ]), 138 + ), 139 + #( 140 + "homeTown", 141 + json.object([ 142 + #("type", json.string("ref")), 143 + #( 144 + "ref", 145 + json.string("community.lexicon.location.hthree"), 146 + ), 147 + ]), 148 + ), 149 + #( 150 + "createdAt", 151 + json.object([ 152 + #("type", json.string("string")), 153 + #("format", json.string("datetime")), 154 + ]), 155 + ), 156 + ]), 157 + ), 158 + ]), 159 + ), 160 + ]), 161 + ), 162 + ]), 163 + ), 164 + ]) 165 + |> json.to_string 166 + } 167 + 74 
168 // Mock resolver factory that returns success without making AT Protocol calls 75 169 fn mock_create_resolver_factory(_collection: String) -> schema.Resolver { 76 170 fn(ctx: schema.Context) -> Result(value.Value, String) { ··· 707 801 False -> should.fail() 708 802 } 709 803 } 804 + 805 + // Test: Mutation with lexicon ref should validate correctly 806 + pub fn mutation_with_lexicon_ref_validates_test() { 807 + // Setup: Create in-memory database with both lexicons 808 + let assert Ok(db) = sqlight.open(":memory:") 809 + let assert Ok(_) = tables.create_lexicon_table(db) 810 + 811 + // Insert BOTH lexicons - the profile AND the referenced location 812 + let location_lexicon = create_location_lexicon() 813 + let assert Ok(_) = 814 + lexicons.insert(db, "community.lexicon.location.hthree", location_lexicon) 815 + 816 + let profile_lexicon = create_profile_with_ref_lexicon() 817 + let assert Ok(_) = 818 + lexicons.insert(db, "org.atmosphereconf.profile", profile_lexicon) 819 + 820 + let assert Ok(lexicon_records) = lexicons.get_all(db) 821 + let parsed_lexicons = 822 + lexicon_records 823 + |> list.filter_map(fn(lex) { lexicon_graphql.parse_lexicon(lex.json) }) 824 + 825 + let empty_fetcher = fn(_collection, _params) { 826 + Ok(#([], option.None, False, False, option.None)) 827 + } 828 + 829 + // Mock resolver that simulates successful validation and record creation 830 + let mock_create_resolver_factory = fn(_collection: String) { 831 + fn(_ctx: schema.Context) { 832 + Ok( 833 + value.Object([ 834 + #( 835 + "uri", 836 + value.String("at://did:plc:test/org.atmosphereconf.profile/self"), 837 + ), 838 + #("cid", value.String("bafyreimock")), 839 + #("did", value.String("did:plc:test")), 840 + #("collection", value.String("org.atmosphereconf.profile")), 841 + #("indexedAt", value.String("2024-01-01T00:00:00Z")), 842 + ]), 843 + ) 844 + } 845 + } 846 + 847 + let create_factory = option.Some(mock_create_resolver_factory) 848 + 849 + let assert Ok(built_schema) = 850 
+ database.build_schema_with_fetcher( 851 + parsed_lexicons, 852 + empty_fetcher, 853 + option.None, 854 + option.None, 855 + create_factory, 856 + option.None, 857 + option.None, 858 + option.None, 859 + ) 860 + 861 + // Execute mutation with homeTown field that uses ref type 862 + let mutation = 863 + "mutation { createOrgAtmosphereconfProfile(rkey: \"self\", input: { displayName: \"Test\", homeTown: { name: \"Portland\", value: \"8528f003fffffff\" }, createdAt: \"2024-01-01T00:00:00Z\" }) { uri } }" 864 + 865 + let ctx_data = value.Object([#("auth_token", value.String("mock_token_123"))]) 866 + let ctx = schema.context(option.Some(ctx_data)) 867 + let assert Ok(response) = executor.execute(mutation, built_schema, ctx) 868 + 869 + // Should have no errors - ref should be resolved correctly 870 + response.errors 871 + |> list.length 872 + |> should.equal(0) 873 + }