Auto-indexing service and GraphQL API for AT Protocol Records — quickslice.slices.network/
atproto gleam graphql

refactor: reorganize lexicon GraphQL into modular structure

Split graphql_gleam.gleam (754 lines) and mutation_resolvers.gleam (1023 lines)
into 4 focused modules under graphql/lexicon/:

- converters.gleam: Record → GraphQL value conversion, JSON parsing
- fetchers.gleam: Database fetcher functions for lexicon_graphql
- mutations.gleam: Mutation resolver factories with shared auth helper
- schema.gleam: Public API entry point (build_schema, execute_query)

Consolidates repeated auth code (~40 lines x 4 resolvers) into a single helper.

+3429 -1810
+1855
dev-docs/plans/2025-12-10-lexicon-graphql-refactor.md
··· 1 + # Lexicon GraphQL Refactor Implementation Plan 2 + 3 + > **For Claude:** REQUIRED SUB-SKILL: Use superpowers:executing-plans to implement this plan task-by-task. 4 + 5 + **Goal:** Refactor `graphql_gleam.gleam` (754 lines) and `mutation_resolvers.gleam` (1023 lines) into a modular structure under `graphql/lexicon/`, mirroring the successful `graphql/admin/` pattern. 6 + 7 + **Architecture:** Split into 4 modules by concern: converters (data transformation), fetchers (database data retrieval), mutations (resolver factories with shared auth helper), and schema (public API entry point). Extract repeated auth code (~40 lines x 4 resolvers) into a single private helper. 8 + 9 + **Tech Stack:** Gleam, swell (GraphQL library), lexicon_graphql, sqlight 10 + 11 + --- 12 + 13 + ### Task 1: Create converters.gleam 14 + 15 + **Files:** 16 + - Create: `server/src/graphql/lexicon/converters.gleam` 17 + 18 + **Step 1: Create the directory** 19 + 20 + Run: `mkdir -p /Users/chadmiller/code/quickslice/.worktrees/graphql-refactor/server/src/graphql/lexicon` 21 + 22 + **Step 2: Create converters.gleam with value conversion functions** 23 + 24 + ```gleam 25 + /// Value converters for lexicon GraphQL API 26 + /// 27 + /// Transform database records and dynamic values to GraphQL value.Value objects 28 + import database/repositories/actors 29 + import database/types 30 + import gleam/dict 31 + import gleam/dynamic 32 + import gleam/dynamic/decode 33 + import gleam/json 34 + import gleam/list 35 + import gleam/result 36 + import sqlight 37 + import swell/value 38 + 39 + /// Convert a database Record to a GraphQL value.Value 40 + /// 41 + /// Creates an Object with all the record metadata plus the parsed JSON value 42 + pub fn record_to_graphql_value( 43 + record: types.Record, 44 + db: sqlight.Connection, 45 + ) -> value.Value { 46 + // Parse the record JSON and convert to GraphQL value 47 + let value_object = case parse_json_to_value(record.json) { 48 + Ok(val) -> val 49 + 
Error(_) -> value.Object([]) 50 + // Fallback to empty object on parse error 51 + } 52 + 53 + // Look up actor handle from actor table 54 + let actor_handle = case actors.get(db, record.did) { 55 + Ok([actor, ..]) -> value.String(actor.handle) 56 + _ -> value.Null 57 + } 58 + 59 + // Create the full record object with metadata and value 60 + value.Object([ 61 + #("uri", value.String(record.uri)), 62 + #("cid", value.String(record.cid)), 63 + #("did", value.String(record.did)), 64 + #("collection", value.String(record.collection)), 65 + #("indexedAt", value.String(record.indexed_at)), 66 + #("actorHandle", actor_handle), 67 + #("value", value_object), 68 + ]) 69 + } 70 + 71 + /// Parse a JSON string and convert it to a GraphQL value.Value 72 + pub fn parse_json_to_value(json_str: String) -> Result(value.Value, String) { 73 + // Parse JSON string to dynamic value 74 + case json.parse(json_str, decode.dynamic) { 75 + Ok(dyn) -> Ok(dynamic_to_value(dyn)) 76 + Error(_) -> Error("Failed to parse JSON") 77 + } 78 + } 79 + 80 + /// Convert a dynamic value to a GraphQL value.Value 81 + pub fn dynamic_to_value(dyn: dynamic.Dynamic) -> value.Value { 82 + // Try different decoders in order 83 + case decode.run(dyn, decode.string) { 84 + Ok(s) -> value.String(s) 85 + Error(_) -> 86 + case decode.run(dyn, decode.int) { 87 + Ok(i) -> value.Int(i) 88 + Error(_) -> 89 + case decode.run(dyn, decode.float) { 90 + Ok(f) -> value.Float(f) 91 + Error(_) -> 92 + case decode.run(dyn, decode.bool) { 93 + Ok(b) -> value.Boolean(b) 94 + Error(_) -> 95 + case decode.run(dyn, decode.list(decode.dynamic)) { 96 + Ok(items) -> { 97 + let converted_items = list.map(items, dynamic_to_value) 98 + value.List(converted_items) 99 + } 100 + Error(_) -> 101 + case 102 + decode.run( 103 + dyn, 104 + decode.dict(decode.string, decode.dynamic), 105 + ) 106 + { 107 + Ok(dict) -> { 108 + let fields = 109 + dict 110 + |> dict.to_list 111 + |> list.map(fn(entry) { 112 + let #(key, val) = entry 113 + #(key, 
dynamic_to_value(val)) 114 + }) 115 + value.Object(fields) 116 + } 117 + Error(_) -> value.Null 118 + } 119 + } 120 + } 121 + } 122 + } 123 + } 124 + } 125 + 126 + /// Convert a dynamic JSON value to graphql value.Value 127 + pub fn json_dynamic_to_value(dyn: dynamic.Dynamic) -> value.Value { 128 + // Try different decoders in order 129 + case decode.run(dyn, decode.string) { 130 + Ok(s) -> value.String(s) 131 + Error(_) -> 132 + case decode.run(dyn, decode.int) { 133 + Ok(i) -> value.Int(i) 134 + Error(_) -> 135 + case decode.run(dyn, decode.float) { 136 + Ok(f) -> value.Float(f) 137 + Error(_) -> 138 + case decode.run(dyn, decode.bool) { 139 + Ok(b) -> value.Boolean(b) 140 + Error(_) -> 141 + // Try as a list 142 + case decode.run(dyn, decode.list(decode.dynamic)) { 143 + Ok(items) -> 144 + value.List(list.map(items, json_dynamic_to_value)) 145 + Error(_) -> 146 + // Try as an object (dict) 147 + case 148 + decode.run( 149 + dyn, 150 + decode.dict(decode.string, decode.dynamic), 151 + ) 152 + { 153 + Ok(d) -> 154 + value.Object( 155 + list.map(dict.to_list(d), fn(pair) { 156 + #(pair.0, json_dynamic_to_value(pair.1)) 157 + }), 158 + ) 159 + Error(_) -> value.Null 160 + } 161 + } 162 + } 163 + } 164 + } 165 + } 166 + } 167 + 168 + /// Extract a reference URI from a record's JSON 169 + /// This handles both simple string fields (at-uri) and strongRef objects 170 + pub fn extract_reference_uri( 171 + json_str: String, 172 + field_name: String, 173 + ) -> Result(String, Nil) { 174 + // Parse the JSON 175 + case parse_json_to_value(json_str) { 176 + Ok(value.Object(fields)) -> { 177 + // Find the field 178 + case list.key_find(fields, field_name) { 179 + Ok(value.String(uri)) -> Ok(uri) 180 + Ok(value.Object(ref_fields)) -> { 181 + // Handle strongRef: { "uri": "...", "cid": "..." 
} 182 + case list.key_find(ref_fields, "uri") { 183 + Ok(value.String(uri)) -> Ok(uri) 184 + _ -> Error(Nil) 185 + } 186 + } 187 + _ -> Error(Nil) 188 + } 189 + } 190 + _ -> Error(Nil) 191 + } 192 + } 193 + ``` 194 + 195 + **Step 3: Run gleam check to verify syntax** 196 + 197 + Run: `cd /Users/chadmiller/code/quickslice/.worktrees/graphql-refactor/server && gleam check` 198 + Expected: No errors for converters.gleam 199 + 200 + **Step 4: Commit** 201 + 202 + ```bash 203 + git add src/graphql/lexicon/converters.gleam 204 + git commit -m "refactor: extract lexicon GraphQL converters to separate module" 205 + ``` 206 + 207 + --- 208 + 209 + ### Task 2: Create fetchers.gleam 210 + 211 + **Files:** 212 + - Create: `server/src/graphql/lexicon/fetchers.gleam` 213 + 214 + **Step 1: Create fetchers.gleam with all database fetcher functions** 215 + 216 + ```gleam 217 + /// Database fetchers for lexicon GraphQL API 218 + /// 219 + /// These functions bridge the database layer to the lexicon_graphql library's 220 + /// expected fetcher signatures for queries, joins, and aggregations. 
221 + import database/queries/aggregates 222 + import database/queries/pagination 223 + import database/repositories/actors 224 + import database/repositories/records 225 + import database/types 226 + import gleam/dict 227 + import gleam/list 228 + import gleam/option 229 + import gleam/result 230 + import gleam/string 231 + import graphql/lexicon/converters 232 + import graphql/where_converter 233 + import lexicon_graphql/input/aggregate 234 + import lexicon_graphql/query/dataloader 235 + import lexicon_graphql/schema/database 236 + import sqlight 237 + import swell/value 238 + 239 + /// Create a record fetcher for paginated collection queries 240 + pub fn record_fetcher(db: sqlight.Connection) { 241 + fn( 242 + collection_nsid: String, 243 + pagination_params: dataloader.PaginationParams, 244 + ) -> Result( 245 + #( 246 + List(#(value.Value, String)), 247 + option.Option(String), 248 + Bool, 249 + Bool, 250 + option.Option(Int), 251 + ), 252 + String, 253 + ) { 254 + // Convert where clause from GraphQL types to SQL types 255 + let where_clause = case pagination_params.where { 256 + option.Some(graphql_where) -> 257 + option.Some(where_converter.convert_where_clause(graphql_where)) 258 + option.None -> option.None 259 + } 260 + 261 + // Get total count for this collection (with where filter if present) 262 + let total_count = 263 + records.get_collection_count_with_where( 264 + db, 265 + collection_nsid, 266 + where_clause, 267 + ) 268 + |> result.map(option.Some) 269 + |> result.unwrap(option.None) 270 + 271 + // Fetch records from database for this collection with pagination 272 + case 273 + records.get_by_collection_paginated_with_where( 274 + db, 275 + collection_nsid, 276 + pagination_params.first, 277 + pagination_params.after, 278 + pagination_params.last, 279 + pagination_params.before, 280 + pagination_params.sort_by, 281 + where_clause, 282 + ) 283 + { 284 + Error(_) -> Ok(#([], option.None, False, False, option.None)) 285 + // Return empty result on 
error 286 + Ok(#(record_list, next_cursor, has_next_page, has_previous_page)) -> { 287 + // Convert database records to GraphQL values with cursors 288 + let graphql_records_with_cursors = 289 + list.map(record_list, fn(record) { 290 + let graphql_value = converters.record_to_graphql_value(record, db) 291 + // Generate cursor for this record 292 + let record_cursor = 293 + pagination.generate_cursor_from_record( 294 + record, 295 + pagination_params.sort_by, 296 + ) 297 + #(graphql_value, record_cursor) 298 + }) 299 + Ok(#( 300 + graphql_records_with_cursors, 301 + next_cursor, 302 + has_next_page, 303 + has_previous_page, 304 + total_count, 305 + )) 306 + } 307 + } 308 + } 309 + } 310 + 311 + /// Create a batch fetcher for join operations (forward and reverse) 312 + pub fn batch_fetcher(db: sqlight.Connection) { 313 + fn( 314 + uris: List(String), 315 + collection: String, 316 + field: option.Option(String), 317 + ) -> Result(dataloader.BatchResult, String) { 318 + // Check if this is a forward join (field is None) or reverse join (field is Some) 319 + case field { 320 + option.None -> { 321 + // Determine if we're dealing with DIDs or URIs 322 + case uris { 323 + [] -> Ok(dict.new()) 324 + [first, ..] 
-> { 325 + case string.starts_with(first, "did:") { 326 + True -> { 327 + // DID join: fetch records by DID and collection 328 + case records.get_by_dids_and_collection(db, uris, collection) { 329 + Ok(record_list) -> { 330 + // Group records by DID 331 + let grouped = 332 + list.fold(record_list, dict.new(), fn(acc, record) { 333 + let graphql_value = 334 + converters.record_to_graphql_value(record, db) 335 + let existing = 336 + dict.get(acc, record.did) |> result.unwrap([]) 337 + dict.insert(acc, record.did, [graphql_value, ..existing]) 338 + }) 339 + Ok(grouped) 340 + } 341 + Error(_) -> Error("Failed to fetch records by DIDs") 342 + } 343 + } 344 + False -> { 345 + // Forward join: fetch records by their URIs 346 + case records.get_by_uris(db, uris) { 347 + Ok(record_list) -> { 348 + // Group records by URI 349 + let grouped = 350 + list.fold(record_list, dict.new(), fn(acc, record) { 351 + let graphql_value = 352 + converters.record_to_graphql_value(record, db) 353 + // For forward joins, return single record per URI 354 + dict.insert(acc, record.uri, [graphql_value]) 355 + }) 356 + Ok(grouped) 357 + } 358 + Error(_) -> Error("Failed to fetch records by URIs") 359 + } 360 + } 361 + } 362 + } 363 + } 364 + } 365 + option.Some(reference_field) -> { 366 + // Reverse join: fetch records that reference the parent URIs 367 + case 368 + records.get_by_reference_field(db, collection, reference_field, uris) 369 + { 370 + Ok(record_list) -> { 371 + // Group records by the parent URI they reference 372 + // Parse each record's JSON to extract the reference field value 373 + let grouped = 374 + list.fold(record_list, dict.new(), fn(acc, record) { 375 + let graphql_value = 376 + converters.record_to_graphql_value(record, db) 377 + // Extract the reference field from the record JSON to find parent URI 378 + case 379 + converters.extract_reference_uri(record.json, reference_field) 380 + { 381 + Ok(parent_uri) -> { 382 + let existing = 383 + dict.get(acc, parent_uri) |> 
result.unwrap([]) 384 + dict.insert(acc, parent_uri, [graphql_value, ..existing]) 385 + } 386 + Error(_) -> acc 387 + } 388 + }) 389 + Ok(grouped) 390 + } 391 + Error(_) -> 392 + Error( 393 + "Failed to fetch records by reference field: " <> reference_field, 394 + ) 395 + } 396 + } 397 + } 398 + } 399 + } 400 + 401 + /// Create a paginated batch fetcher for join operations with pagination 402 + pub fn paginated_batch_fetcher(db: sqlight.Connection) { 403 + fn( 404 + key: String, 405 + collection: String, 406 + field: option.Option(String), 407 + pagination_params: dataloader.PaginationParams, 408 + ) -> Result(dataloader.PaginatedBatchResult, String) { 409 + // Convert pagination params to database pagination params 410 + let db_first = pagination_params.first 411 + let db_after = pagination_params.after 412 + let db_last = pagination_params.last 413 + let db_before = pagination_params.before 414 + let db_sort_by = pagination_params.sort_by 415 + 416 + // Convert where clause from GraphQL to database format 417 + let db_where = case pagination_params.where { 418 + option.Some(where_clause) -> 419 + option.Some(where_converter.convert_where_clause(where_clause)) 420 + option.None -> option.None 421 + } 422 + 423 + // Check if this is a DID join (field is None) or reverse join (field is Some) 424 + case field { 425 + option.None -> { 426 + // DID join: key is the DID 427 + case 428 + records.get_by_dids_and_collection_paginated( 429 + db, 430 + key, 431 + collection, 432 + db_first, 433 + db_after, 434 + db_last, 435 + db_before, 436 + db_sort_by, 437 + db_where, 438 + ) 439 + { 440 + Ok(#( 441 + record_list, 442 + _next_cursor, 443 + has_next_page, 444 + has_previous_page, 445 + total_count, 446 + )) -> { 447 + // Convert records to GraphQL values with cursors 448 + let edges = 449 + list.map(record_list, fn(record) { 450 + let graphql_value = 451 + converters.record_to_graphql_value(record, db) 452 + let cursor = 453 + pagination.generate_cursor_from_record(record, 
db_sort_by) 454 + #(graphql_value, cursor) 455 + }) 456 + 457 + Ok(dataloader.PaginatedBatchResult( 458 + edges: edges, 459 + has_next_page: has_next_page, 460 + has_previous_page: has_previous_page, 461 + total_count: total_count, 462 + )) 463 + } 464 + Error(_) -> Error("Failed to fetch paginated records by DID") 465 + } 466 + } 467 + option.Some(reference_field) -> { 468 + // Reverse join: key is the parent URI 469 + case 470 + records.get_by_reference_field_paginated( 471 + db, 472 + collection, 473 + reference_field, 474 + key, 475 + db_first, 476 + db_after, 477 + db_last, 478 + db_before, 479 + db_sort_by, 480 + db_where, 481 + ) 482 + { 483 + Ok(#( 484 + record_list, 485 + _next_cursor, 486 + has_next_page, 487 + has_previous_page, 488 + total_count, 489 + )) -> { 490 + // Convert records to GraphQL values with cursors 491 + let edges = 492 + list.map(record_list, fn(record) { 493 + let graphql_value = 494 + converters.record_to_graphql_value(record, db) 495 + let cursor = 496 + pagination.generate_cursor_from_record(record, db_sort_by) 497 + #(graphql_value, cursor) 498 + }) 499 + 500 + Ok(dataloader.PaginatedBatchResult( 501 + edges: edges, 502 + has_next_page: has_next_page, 503 + has_previous_page: has_previous_page, 504 + total_count: total_count, 505 + )) 506 + } 507 + Error(_) -> 508 + Error( 509 + "Failed to fetch paginated records by reference field: " 510 + <> reference_field, 511 + ) 512 + } 513 + } 514 + } 515 + } 516 + } 517 + 518 + /// Create an aggregate fetcher for GROUP BY queries 519 + pub fn aggregate_fetcher(db: sqlight.Connection) { 520 + fn(collection_nsid: String, params: database.AggregateParams) { 521 + // Convert GraphQL where clause to SQL where clause 522 + let where_clause = case params.where { 523 + option.Some(graphql_where) -> 524 + option.Some(where_converter.convert_where_clause(graphql_where)) 525 + option.None -> option.None 526 + } 527 + 528 + // Convert GroupByFieldInput to types.GroupByField 529 + let group_by_fields = 
530 + list.map(params.group_by, fn(gb) { 531 + case gb.interval { 532 + option.Some(interval) -> { 533 + let db_interval = case interval { 534 + aggregate.Hour -> types.Hour 535 + aggregate.Day -> types.Day 536 + aggregate.Week -> types.Week 537 + aggregate.Month -> types.Month 538 + } 539 + types.TruncatedField(gb.field, db_interval) 540 + } 541 + option.None -> types.SimpleField(gb.field) 542 + } 543 + }) 544 + 545 + // Call database aggregation function 546 + aggregates.get_aggregated_records( 547 + db, 548 + collection_nsid, 549 + group_by_fields, 550 + where_clause, 551 + params.order_by_desc, 552 + params.limit, 553 + ) 554 + |> result.map_error(fn(_) { "Failed to fetch aggregated records" }) 555 + } 556 + } 557 + 558 + /// Create a viewer fetcher for authenticated user info 559 + pub fn viewer_fetcher(db: sqlight.Connection) { 560 + fn(token: String) { 561 + case atproto_auth.verify_token(db, token) { 562 + Error(_) -> Error("Invalid or expired token") 563 + Ok(user_info) -> { 564 + // Get handle from actors table 565 + let handle = case actors.get(db, user_info.did) { 566 + Ok([actor, ..]) -> option.Some(actor.handle) 567 + _ -> option.None 568 + } 569 + Ok(#(user_info.did, handle)) 570 + } 571 + } 572 + } 573 + } 574 + ``` 575 + 576 + **Step 2: Add missing import for atproto_auth** 577 + 578 + The viewer_fetcher uses `atproto_auth.verify_token`. 
Add this import at the top: 579 + 580 + ```gleam 581 + import atproto_auth 582 + ``` 583 + 584 + **Step 3: Run gleam check to verify syntax** 585 + 586 + Run: `cd /Users/chadmiller/code/quickslice/.worktrees/graphql-refactor/server && gleam check` 587 + Expected: No errors 588 + 589 + **Step 4: Commit** 590 + 591 + ```bash 592 + git add src/graphql/lexicon/fetchers.gleam 593 + git commit -m "refactor: extract lexicon GraphQL fetchers to separate module" 594 + ``` 595 + 596 + --- 597 + 598 + ### Task 3: Create mutations.gleam 599 + 600 + **Files:** 601 + - Create: `server/src/graphql/lexicon/mutations.gleam` 602 + 603 + **Step 1: Create mutations.gleam with MutationContext, auth helper, and resolver factories** 604 + 605 + This is a large file. Copy from `mutation_resolvers.gleam` with these changes: 606 + 607 + 1. Add private `AuthenticatedSession` type and `get_authenticated_session` helper 608 + 2. Update each resolver factory to use the helper instead of inline auth code 609 + 3. Keep all blob transformation helpers as private functions 610 + 611 + ```gleam 612 + /// Mutation Resolvers for lexicon GraphQL API 613 + /// 614 + /// Implements GraphQL mutation resolvers with AT Protocol integration. 615 + /// These resolvers handle authentication, validation, and database operations. 
616 + import actor_validator 617 + import atproto_auth 618 + import backfill 619 + import database/repositories/lexicons 620 + import database/repositories/records 621 + import dpop 622 + import gleam/dict 623 + import gleam/dynamic 624 + import gleam/dynamic/decode 625 + import gleam/erlang/process.{type Subject} 626 + import gleam/int 627 + import gleam/json 628 + import gleam/list 629 + import gleam/option 630 + import gleam/result 631 + import honk 632 + import honk/errors 633 + import lib/oauth/did_cache 634 + import sqlight 635 + import swell/schema 636 + import swell/value 637 + 638 + /// Context for mutation execution 639 + pub type MutationContext { 640 + MutationContext( 641 + db: sqlight.Connection, 642 + did_cache: Subject(did_cache.Message), 643 + signing_key: option.Option(String), 644 + atp_client_id: String, 645 + plc_url: String, 646 + collection_ids: List(String), 647 + external_collection_ids: List(String), 648 + ) 649 + } 650 + 651 + // ─── Private Auth Helper ─────────────────────────────────────────── 652 + 653 + /// Authenticated session info returned by auth helper 654 + type AuthenticatedSession { 655 + AuthenticatedSession( 656 + user_info: atproto_auth.UserInfo, 657 + session: atproto_auth.AtpSession, 658 + ) 659 + } 660 + 661 + /// Extract token, verify auth, ensure actor exists, get ATP session 662 + /// Consolidates ~40 lines of repeated auth code into one call 663 + fn get_authenticated_session( 664 + resolver_ctx: schema.Context, 665 + ctx: MutationContext, 666 + ) -> Result(AuthenticatedSession, String) { 667 + // Step 1: Extract auth token from context data 668 + let token = case resolver_ctx.data { 669 + option.Some(value.Object(fields)) -> { 670 + case list.key_find(fields, "auth_token") { 671 + Ok(value.String(t)) -> Ok(t) 672 + Ok(_) -> Error("auth_token must be a string") 673 + Error(_) -> 674 + Error("Authentication required. Please provide Authorization header.") 675 + } 676 + } 677 + _ -> Error("Authentication required. 
Please provide Authorization header.") 678 + } 679 + 680 + use token <- result.try(token) 681 + 682 + // Step 2: Verify OAuth token 683 + use user_info <- result.try( 684 + atproto_auth.verify_token(ctx.db, token) 685 + |> result.map_error(fn(err) { 686 + case err { 687 + atproto_auth.UnauthorizedToken -> "Unauthorized" 688 + atproto_auth.TokenExpired -> "Token expired" 689 + atproto_auth.MissingAuthHeader -> "Missing authentication" 690 + atproto_auth.InvalidAuthHeader -> "Invalid authentication header" 691 + _ -> "Authentication error" 692 + } 693 + }), 694 + ) 695 + 696 + // Step 3: Ensure actor exists in database 697 + use is_new_actor <- result.try(actor_validator.ensure_actor_exists( 698 + ctx.db, 699 + user_info.did, 700 + ctx.plc_url, 701 + )) 702 + 703 + // If new actor, spawn backfill for all collections 704 + case is_new_actor { 705 + True -> { 706 + process.spawn_unlinked(fn() { 707 + backfill.backfill_collections_for_actor( 708 + ctx.db, 709 + user_info.did, 710 + ctx.collection_ids, 711 + ctx.external_collection_ids, 712 + ctx.plc_url, 713 + ) 714 + }) 715 + Nil 716 + } 717 + False -> Nil 718 + } 719 + 720 + // Step 4: Get AT Protocol session 721 + use session <- result.try( 722 + atproto_auth.get_atp_session( 723 + ctx.db, 724 + ctx.did_cache, 725 + token, 726 + ctx.signing_key, 727 + ctx.atp_client_id, 728 + ) 729 + |> result.map_error(fn(err) { 730 + case err { 731 + atproto_auth.SessionNotFound -> "Session not found" 732 + atproto_auth.SessionNotReady -> "Session not ready" 733 + atproto_auth.RefreshFailed(msg) -> "Token refresh failed: " <> msg 734 + atproto_auth.DIDResolutionFailed(msg) -> "DID resolution failed: " <> msg 735 + _ -> "Failed to get ATP session" 736 + } 737 + }), 738 + ) 739 + 740 + Ok(AuthenticatedSession(user_info: user_info, session: session)) 741 + } 742 + 743 + // ─── Private Blob Helpers ────────────────────────────────────────── 744 + 745 + /// Convert GraphQL value to JSON value (not string) 746 + fn 
graphql_value_to_json_value(val: value.Value) -> json.Json { 747 + case val { 748 + value.String(s) -> json.string(s) 749 + value.Int(i) -> json.int(i) 750 + value.Float(f) -> json.float(f) 751 + value.Boolean(b) -> json.bool(b) 752 + value.Null -> json.null() 753 + value.Enum(e) -> json.string(e) 754 + value.List(items) -> json.array(items, graphql_value_to_json_value) 755 + value.Object(fields) -> { 756 + json.object( 757 + fields 758 + |> list.map(fn(field) { 759 + let #(key, val) = field 760 + #(key, graphql_value_to_json_value(val)) 761 + }), 762 + ) 763 + } 764 + } 765 + } 766 + 767 + /// Get blob field paths from a lexicon for a given collection 768 + fn get_blob_paths( 769 + collection: String, 770 + lexicons: List(json.Json), 771 + ) -> List(List(String)) { 772 + let lexicon = 773 + list.find(lexicons, fn(lex) { 774 + case json.parse(json.to_string(lex), decode.at(["id"], decode.string)) { 775 + Ok(id) -> id == collection 776 + Error(_) -> False 777 + } 778 + }) 779 + 780 + case lexicon { 781 + Ok(lex) -> { 782 + let properties_decoder = 783 + decode.at( 784 + ["defs", "main", "record", "properties"], 785 + decode.dict(decode.string, decode.dynamic), 786 + ) 787 + case json.parse(json.to_string(lex), properties_decoder) { 788 + Ok(properties) -> extract_blob_paths_from_properties(properties, []) 789 + Error(_) -> [] 790 + } 791 + } 792 + Error(_) -> [] 793 + } 794 + } 795 + 796 + /// Recursively extract blob paths from lexicon properties 797 + fn extract_blob_paths_from_properties( 798 + properties: dict.Dict(String, dynamic.Dynamic), 799 + current_path: List(String), 800 + ) -> List(List(String)) { 801 + dict.fold(properties, [], fn(acc, field_name, field_def) { 802 + let field_path = list.append(current_path, [field_name]) 803 + let type_result = decode.run(field_def, decode.at(["type"], decode.string)) 804 + 805 + case type_result { 806 + Ok("blob") -> [field_path, ..acc] 807 + Ok("object") -> { 808 + let nested_props_result = 809 + decode.run( 810 + 
field_def, 811 + decode.at( 812 + ["properties"], 813 + decode.dict(decode.string, decode.dynamic), 814 + ), 815 + ) 816 + case nested_props_result { 817 + Ok(nested_props) -> { 818 + let nested_paths = 819 + extract_blob_paths_from_properties(nested_props, field_path) 820 + list.append(nested_paths, acc) 821 + } 822 + Error(_) -> acc 823 + } 824 + } 825 + Ok("array") -> { 826 + let items_type_result = 827 + decode.run(field_def, decode.at(["items", "type"], decode.string)) 828 + case items_type_result { 829 + Ok("blob") -> [field_path, ..acc] 830 + Ok("object") -> { 831 + let item_props_result = 832 + decode.run( 833 + field_def, 834 + decode.at( 835 + ["items", "properties"], 836 + decode.dict(decode.string, decode.dynamic), 837 + ), 838 + ) 839 + case item_props_result { 840 + Ok(item_props) -> { 841 + let nested_paths = 842 + extract_blob_paths_from_properties(item_props, field_path) 843 + list.append(nested_paths, acc) 844 + } 845 + Error(_) -> acc 846 + } 847 + } 848 + _ -> acc 849 + } 850 + } 851 + _ -> acc 852 + } 853 + }) 854 + } 855 + 856 + /// Transform blob inputs in a value from GraphQL format to AT Protocol format 857 + fn transform_blob_inputs( 858 + input: value.Value, 859 + blob_paths: List(List(String)), 860 + ) -> value.Value { 861 + transform_value_at_paths(input, blob_paths, []) 862 + } 863 + 864 + /// Recursively transform values at blob paths 865 + fn transform_value_at_paths( 866 + val: value.Value, 867 + blob_paths: List(List(String)), 868 + current_path: List(String), 869 + ) -> value.Value { 870 + case val { 871 + value.Object(fields) -> { 872 + let is_blob_path = 873 + list.any(blob_paths, fn(path) { 874 + path == current_path && current_path != [] 875 + }) 876 + 877 + case is_blob_path { 878 + True -> transform_blob_object(fields) 879 + False -> { 880 + value.Object( 881 + list.map(fields, fn(field) { 882 + let #(key, field_val) = field 883 + let new_path = list.append(current_path, [key]) 884 + #(key, 
transform_value_at_paths(field_val, blob_paths, new_path)) 885 + }), 886 + ) 887 + } 888 + } 889 + } 890 + value.List(items) -> { 891 + let is_blob_array_path = 892 + list.any(blob_paths, fn(path) { 893 + path == current_path && current_path != [] 894 + }) 895 + 896 + case is_blob_array_path { 897 + True -> { 898 + value.List( 899 + list.map(items, fn(item) { 900 + case item { 901 + value.Object(item_fields) -> transform_blob_object(item_fields) 902 + _ -> item 903 + } 904 + }), 905 + ) 906 + } 907 + False -> { 908 + let paths_through_here = 909 + list.filter(blob_paths, fn(path) { 910 + list.length(path) > list.length(current_path) 911 + && list.take(path, list.length(current_path)) == current_path 912 + }) 913 + 914 + case list.is_empty(paths_through_here) { 915 + True -> val 916 + False -> { 917 + value.List( 918 + list.map(items, fn(item) { 919 + transform_value_at_paths(item, blob_paths, current_path) 920 + }), 921 + ) 922 + } 923 + } 924 + } 925 + } 926 + } 927 + _ -> val 928 + } 929 + } 930 + 931 + /// Transform a BlobInput object to AT Protocol blob format 932 + fn transform_blob_object(fields: List(#(String, value.Value))) -> value.Value { 933 + let ref = case list.key_find(fields, "ref") { 934 + Ok(value.String(r)) -> r 935 + _ -> "" 936 + } 937 + let mime_type = case list.key_find(fields, "mimeType") { 938 + Ok(value.String(m)) -> m 939 + _ -> "" 940 + } 941 + let size = case list.key_find(fields, "size") { 942 + Ok(value.Int(s)) -> s 943 + _ -> 0 944 + } 945 + 946 + case ref != "" && mime_type != "" { 947 + True -> 948 + value.Object([ 949 + #("$type", value.String("blob")), 950 + #("ref", value.Object([#("$link", value.String(ref))])), 951 + #("mimeType", value.String(mime_type)), 952 + #("size", value.Int(size)), 953 + ]) 954 + False -> value.Object(fields) 955 + } 956 + } 957 + 958 + /// Decode base64 string to bit array 959 + fn decode_base64(base64_str: String) -> Result(BitArray, Nil) { 960 + Ok(do_erlang_base64_decode(base64_str)) 961 + } 962 + 
963 + /// Extract blob fields from dynamic PDS response 964 + fn extract_blob_from_dynamic( 965 + blob_dynamic: dynamic.Dynamic, 966 + did: String, 967 + ) -> Result(value.Value, String) { 968 + let ref_link_decoder = { 969 + use link <- decode.field("$link", decode.string) 970 + decode.success(link) 971 + } 972 + 973 + let full_decoder = { 974 + use mime_type <- decode.field("mimeType", decode.string) 975 + use size <- decode.field("size", decode.int) 976 + use ref <- decode.field("ref", ref_link_decoder) 977 + decode.success(#(ref, mime_type, size)) 978 + } 979 + 980 + use #(ref, mime_type, size) <- result.try( 981 + decode.run(blob_dynamic, full_decoder) 982 + |> result.map_error(fn(_) { "Failed to decode blob fields" }), 983 + ) 984 + 985 + Ok( 986 + value.Object([ 987 + #("ref", value.String(ref)), 988 + #("mime_type", value.String(mime_type)), 989 + #("size", value.Int(size)), 990 + #("did", value.String(did)), 991 + ]), 992 + ) 993 + } 994 + 995 + /// Erlang FFI: base64:decode/1 returns BitArray directly (not Result) 996 + @external(erlang, "base64", "decode") 997 + fn do_erlang_base64_decode(a: String) -> BitArray 998 + 999 + // ─── Public Resolver Factories ───────────────────────────────────── 1000 + 1001 + /// Create a resolver factory for create mutations 1002 + pub fn create_resolver_factory( 1003 + collection: String, 1004 + ctx: MutationContext, 1005 + ) -> schema.Resolver { 1006 + fn(resolver_ctx: schema.Context) -> Result(value.Value, String) { 1007 + // Get authenticated session using helper 1008 + use auth <- result.try(get_authenticated_session(resolver_ctx, ctx)) 1009 + 1010 + // Get input and rkey from arguments 1011 + let input_result = case schema.get_argument(resolver_ctx, "input") { 1012 + option.Some(val) -> Ok(val) 1013 + option.None -> Error("Missing required argument: input") 1014 + } 1015 + 1016 + use input <- result.try(input_result) 1017 + 1018 + let rkey = case schema.get_argument(resolver_ctx, "rkey") { 1019 + 
option.Some(value.String(r)) -> option.Some(r) 1020 + _ -> option.None 1021 + } 1022 + 1023 + // Fetch lexicons for validation and blob path extraction 1024 + use all_lexicon_records <- result.try( 1025 + lexicons.get_all(ctx.db) 1026 + |> result.map_error(fn(_) { "Failed to fetch lexicons" }), 1027 + ) 1028 + 1029 + use all_lex_jsons <- result.try( 1030 + all_lexicon_records 1031 + |> list.try_map(fn(lex) { 1032 + honk.parse_json_string(lex.json) 1033 + |> result.map_error(fn(e) { 1034 + "Failed to parse lexicon JSON: " <> errors.to_string(e) 1035 + }) 1036 + }), 1037 + ) 1038 + 1039 + // Transform blob inputs from GraphQL format to AT Protocol format 1040 + let blob_paths = get_blob_paths(collection, all_lex_jsons) 1041 + let transformed_input = transform_blob_inputs(input, blob_paths) 1042 + let record_json_value = graphql_value_to_json_value(transformed_input) 1043 + let record_json_string = json.to_string(record_json_value) 1044 + 1045 + // Validate against lexicon 1046 + use _ <- result.try( 1047 + honk.validate_record(all_lex_jsons, collection, record_json_value) 1048 + |> result.map_error(fn(err) { 1049 + "Validation failed: " <> errors.to_string(err) 1050 + }), 1051 + ) 1052 + 1053 + // Call createRecord via AT Protocol 1054 + let create_body = 1055 + case rkey { 1056 + option.Some(r) -> 1057 + json.object([ 1058 + #("repo", json.string(auth.user_info.did)), 1059 + #("collection", json.string(collection)), 1060 + #("rkey", json.string(r)), 1061 + #("record", record_json_value), 1062 + ]) 1063 + option.None -> 1064 + json.object([ 1065 + #("repo", json.string(auth.user_info.did)), 1066 + #("collection", json.string(collection)), 1067 + #("record", record_json_value), 1068 + ]) 1069 + } 1070 + |> json.to_string 1071 + 1072 + let pds_url = 1073 + auth.session.pds_endpoint <> "/xrpc/com.atproto.repo.createRecord" 1074 + 1075 + use response <- result.try( 1076 + dpop.make_dpop_request("POST", pds_url, auth.session, create_body) 1077 + |> result.map_error(fn(_) 
{ "Failed to create record on PDS" }), 1078 + ) 1079 + 1080 + use #(uri, cid) <- result.try(case response.status { 1081 + 200 | 201 -> { 1082 + let response_decoder = { 1083 + use uri <- decode.field("uri", decode.string) 1084 + use cid <- decode.field("cid", decode.string) 1085 + decode.success(#(uri, cid)) 1086 + } 1087 + json.parse(response.body, response_decoder) 1088 + |> result.map_error(fn(_) { 1089 + "Failed to parse PDS success response. Body: " <> response.body 1090 + }) 1091 + } 1092 + _ -> 1093 + Error( 1094 + "PDS request failed with status " 1095 + <> int.to_string(response.status) 1096 + <> ": " 1097 + <> response.body, 1098 + ) 1099 + }) 1100 + 1101 + // Index the created record in the database 1102 + use _ <- result.try( 1103 + records.insert( 1104 + ctx.db, 1105 + uri, 1106 + cid, 1107 + auth.user_info.did, 1108 + collection, 1109 + record_json_string, 1110 + ) 1111 + |> result.map_error(fn(_) { "Failed to index record in database" }), 1112 + ) 1113 + 1114 + Ok( 1115 + value.Object([ 1116 + #("uri", value.String(uri)), 1117 + #("cid", value.String(cid)), 1118 + #("did", value.String(auth.user_info.did)), 1119 + #("collection", value.String(collection)), 1120 + #("indexedAt", value.String("")), 1121 + #("value", input), 1122 + ]), 1123 + ) 1124 + } 1125 + } 1126 + 1127 + /// Create a resolver factory for update mutations 1128 + pub fn update_resolver_factory( 1129 + collection: String, 1130 + ctx: MutationContext, 1131 + ) -> schema.Resolver { 1132 + fn(resolver_ctx: schema.Context) -> Result(value.Value, String) { 1133 + // Get authenticated session using helper 1134 + use auth <- result.try(get_authenticated_session(resolver_ctx, ctx)) 1135 + 1136 + // Get rkey (required) and input from arguments 1137 + let rkey_result = case schema.get_argument(resolver_ctx, "rkey") { 1138 + option.Some(value.String(r)) -> Ok(r) 1139 + option.Some(_) -> Error("rkey must be a string") 1140 + option.None -> Error("Missing required argument: rkey") 1141 + } 1142 + 
1143 + use rkey <- result.try(rkey_result) 1144 + 1145 + let input_result = case schema.get_argument(resolver_ctx, "input") { 1146 + option.Some(val) -> Ok(val) 1147 + option.None -> Error("Missing required argument: input") 1148 + } 1149 + 1150 + use input <- result.try(input_result) 1151 + 1152 + // Fetch lexicons for validation and blob path extraction 1153 + use all_lexicon_records <- result.try( 1154 + lexicons.get_all(ctx.db) 1155 + |> result.map_error(fn(_) { "Failed to fetch lexicons" }), 1156 + ) 1157 + 1158 + use all_lex_jsons <- result.try( 1159 + all_lexicon_records 1160 + |> list.try_map(fn(lex) { 1161 + honk.parse_json_string(lex.json) 1162 + |> result.map_error(fn(e) { 1163 + "Failed to parse lexicon JSON: " <> errors.to_string(e) 1164 + }) 1165 + }), 1166 + ) 1167 + 1168 + // Transform blob inputs from GraphQL format to AT Protocol format 1169 + let blob_paths = get_blob_paths(collection, all_lex_jsons) 1170 + let transformed_input = transform_blob_inputs(input, blob_paths) 1171 + let record_json_value = graphql_value_to_json_value(transformed_input) 1172 + let record_json_string = json.to_string(record_json_value) 1173 + 1174 + // Validate against lexicon 1175 + use _ <- result.try( 1176 + honk.validate_record(all_lex_jsons, collection, record_json_value) 1177 + |> result.map_error(fn(err) { 1178 + "Validation failed: " <> errors.to_string(err) 1179 + }), 1180 + ) 1181 + 1182 + // Call putRecord via AT Protocol 1183 + let update_body = 1184 + json.object([ 1185 + #("repo", json.string(auth.user_info.did)), 1186 + #("collection", json.string(collection)), 1187 + #("rkey", json.string(rkey)), 1188 + #("record", record_json_value), 1189 + ]) 1190 + |> json.to_string 1191 + 1192 + let pds_url = auth.session.pds_endpoint <> "/xrpc/com.atproto.repo.putRecord" 1193 + 1194 + use response <- result.try( 1195 + dpop.make_dpop_request("POST", pds_url, auth.session, update_body) 1196 + |> result.map_error(fn(_) { "Failed to update record on PDS" }), 1197 + ) 
1198 + 1199 + use #(uri, cid) <- result.try(case response.status { 1200 + 200 | 201 -> { 1201 + let response_decoder = { 1202 + use uri <- decode.field("uri", decode.string) 1203 + use cid <- decode.field("cid", decode.string) 1204 + decode.success(#(uri, cid)) 1205 + } 1206 + json.parse(response.body, response_decoder) 1207 + |> result.map_error(fn(_) { 1208 + "Failed to parse PDS success response. Body: " <> response.body 1209 + }) 1210 + } 1211 + _ -> 1212 + Error( 1213 + "PDS request failed with status " 1214 + <> int.to_string(response.status) 1215 + <> ": " 1216 + <> response.body, 1217 + ) 1218 + }) 1219 + 1220 + // Update the record in the database 1221 + use _ <- result.try( 1222 + records.update(ctx.db, uri, cid, record_json_string) 1223 + |> result.map_error(fn(_) { "Failed to update record in database" }), 1224 + ) 1225 + 1226 + Ok( 1227 + value.Object([ 1228 + #("uri", value.String(uri)), 1229 + #("cid", value.String(cid)), 1230 + #("did", value.String(auth.user_info.did)), 1231 + #("collection", value.String(collection)), 1232 + #("indexedAt", value.String("")), 1233 + #("value", input), 1234 + ]), 1235 + ) 1236 + } 1237 + } 1238 + 1239 + /// Create a resolver factory for delete mutations 1240 + pub fn delete_resolver_factory( 1241 + collection: String, 1242 + ctx: MutationContext, 1243 + ) -> schema.Resolver { 1244 + fn(resolver_ctx: schema.Context) -> Result(value.Value, String) { 1245 + // Get authenticated session using helper 1246 + use auth <- result.try(get_authenticated_session(resolver_ctx, ctx)) 1247 + 1248 + // Get rkey (required) from arguments 1249 + let rkey_result = case schema.get_argument(resolver_ctx, "rkey") { 1250 + option.Some(value.String(r)) -> Ok(r) 1251 + option.Some(_) -> Error("rkey must be a string") 1252 + option.None -> Error("Missing required argument: rkey") 1253 + } 1254 + 1255 + use rkey <- result.try(rkey_result) 1256 + 1257 + // Build the record URI to be deleted 1258 + let uri = 1259 + "at://" <> auth.user_info.did 
<> "/" <> collection <> "/" <> rkey 1260 + 1261 + // Call deleteRecord via AT Protocol 1262 + let delete_body = 1263 + json.object([ 1264 + #("repo", json.string(auth.user_info.did)), 1265 + #("collection", json.string(collection)), 1266 + #("rkey", json.string(rkey)), 1267 + ]) 1268 + |> json.to_string 1269 + 1270 + let pds_url = 1271 + auth.session.pds_endpoint <> "/xrpc/com.atproto.repo.deleteRecord" 1272 + 1273 + use response <- result.try( 1274 + dpop.make_dpop_request("POST", pds_url, auth.session, delete_body) 1275 + |> result.map_error(fn(_) { "Failed to delete record on PDS" }), 1276 + ) 1277 + 1278 + use _ <- result.try(case response.status { 1279 + 200 | 201 | 204 -> Ok(Nil) 1280 + _ -> 1281 + Error( 1282 + "PDS delete request failed with status " 1283 + <> int.to_string(response.status) 1284 + <> ": " 1285 + <> response.body, 1286 + ) 1287 + }) 1288 + 1289 + // Delete the record from the database 1290 + use _ <- result.try( 1291 + records.delete(ctx.db, uri) 1292 + |> result.map_error(fn(_) { "Failed to delete record from database" }), 1293 + ) 1294 + 1295 + Ok(value.Object([#("uri", value.String(uri))])) 1296 + } 1297 + } 1298 + 1299 + /// Create a resolver for uploadBlob mutation 1300 + pub fn upload_blob_resolver_factory(ctx: MutationContext) -> schema.Resolver { 1301 + fn(resolver_ctx: schema.Context) -> Result(value.Value, String) { 1302 + // Get authenticated session using helper 1303 + use auth <- result.try(get_authenticated_session(resolver_ctx, ctx)) 1304 + 1305 + // Get data and mimeType from arguments 1306 + let data_result = case schema.get_argument(resolver_ctx, "data") { 1307 + option.Some(value.String(d)) -> Ok(d) 1308 + option.Some(_) -> Error("data must be a string") 1309 + option.None -> Error("Missing required argument: data") 1310 + } 1311 + 1312 + use data_base64 <- result.try(data_result) 1313 + 1314 + let mime_type_result = case schema.get_argument(resolver_ctx, "mimeType") { 1315 + option.Some(value.String(m)) -> Ok(m) 1316 + 
option.Some(_) -> Error("mimeType must be a string") 1317 + option.None -> Error("Missing required argument: mimeType") 1318 + } 1319 + 1320 + use mime_type <- result.try(mime_type_result) 1321 + 1322 + // Decode base64 data to binary 1323 + use binary_data <- result.try( 1324 + decode_base64(data_base64) 1325 + |> result.map_error(fn(_) { "Failed to decode base64 data" }), 1326 + ) 1327 + 1328 + // Upload blob to PDS 1329 + let pds_url = auth.session.pds_endpoint <> "/xrpc/com.atproto.repo.uploadBlob" 1330 + 1331 + use response <- result.try( 1332 + dpop.make_dpop_request_with_binary( 1333 + "POST", 1334 + pds_url, 1335 + auth.session, 1336 + binary_data, 1337 + mime_type, 1338 + ) 1339 + |> result.map_error(fn(_) { "Failed to upload blob to PDS" }), 1340 + ) 1341 + 1342 + use blob_ref <- result.try(case response.status { 1343 + 200 | 201 -> { 1344 + let response_decoder = { 1345 + use blob <- decode.field("blob", decode.dynamic) 1346 + decode.success(blob) 1347 + } 1348 + 1349 + case json.parse(response.body, response_decoder) { 1350 + Ok(blob_dynamic) -> 1351 + extract_blob_from_dynamic(blob_dynamic, auth.user_info.did) 1352 + Error(_) -> 1353 + Error("Failed to parse PDS response. 
Body: " <> response.body) 1354 + } 1355 + } 1356 + _ -> 1357 + Error( 1358 + "PDS request failed with status " 1359 + <> int.to_string(response.status) 1360 + <> ": " 1361 + <> response.body, 1362 + ) 1363 + }) 1364 + 1365 + Ok(blob_ref) 1366 + } 1367 + } 1368 + ``` 1369 + 1370 + **Step 2: Run gleam check to verify syntax** 1371 + 1372 + Run: `cd /Users/chadmiller/code/quickslice/.worktrees/graphql-refactor/server && gleam check` 1373 + Expected: No errors 1374 + 1375 + **Step 3: Commit** 1376 + 1377 + ```bash 1378 + git add src/graphql/lexicon/mutations.gleam 1379 + git commit -m "refactor: extract lexicon GraphQL mutations with shared auth helper" 1380 + ``` 1381 + 1382 + --- 1383 + 1384 + ### Task 4: Create schema.gleam 1385 + 1386 + **Files:** 1387 + - Create: `server/src/graphql/lexicon/schema.gleam` 1388 + 1389 + **Step 1: Create schema.gleam with public API functions** 1390 + 1391 + ```gleam 1392 + /// Lexicon GraphQL schema entry point 1393 + /// 1394 + /// Public API for building and executing the lexicon-driven GraphQL schema. 1395 + /// External code should import this module for all lexicon GraphQL operations. 
1396 + import backfill 1397 + import database/repositories/config as config_repo 1398 + import database/repositories/lexicons 1399 + import gleam/dict 1400 + import gleam/dynamic 1401 + import gleam/dynamic/decode 1402 + import gleam/erlang/process.{type Subject} 1403 + import gleam/json 1404 + import gleam/list 1405 + import gleam/option 1406 + import gleam/result 1407 + import gleam/string 1408 + import graphql/lexicon/converters 1409 + import graphql/lexicon/fetchers 1410 + import graphql/lexicon/mutations 1411 + import lexicon_graphql 1412 + import lexicon_graphql/schema/database 1413 + import lib/oauth/did_cache 1414 + import sqlight 1415 + import swell/executor 1416 + import swell/schema 1417 + import swell/value 1418 + 1419 + /// Build a GraphQL schema from database lexicons 1420 + /// 1421 + /// This is exposed for WebSocket subscriptions to build the schema once 1422 + /// and reuse it for multiple subscription executions. 1423 + pub fn build_schema_from_db( 1424 + db: sqlight.Connection, 1425 + did_cache: Subject(did_cache.Message), 1426 + signing_key: option.Option(String), 1427 + atp_client_id: String, 1428 + plc_url: String, 1429 + domain_authority: String, 1430 + ) -> Result(schema.Schema, String) { 1431 + // Step 1: Fetch lexicons from database 1432 + use lexicon_records <- result.try( 1433 + lexicons.get_all(db) 1434 + |> result.map_error(fn(_) { "Failed to fetch lexicons from database" }), 1435 + ) 1436 + 1437 + // Step 2: Parse lexicon JSON into structured Lexicon types 1438 + let parsed_lexicons = 1439 + lexicon_records 1440 + |> list.filter_map(fn(lex) { 1441 + case lexicon_graphql.parse_lexicon(lex.json) { 1442 + Ok(parsed) -> Ok(parsed) 1443 + Error(_) -> Error(Nil) 1444 + } 1445 + }) 1446 + 1447 + // Check if we got any valid lexicons 1448 + case parsed_lexicons { 1449 + [] -> Error("No valid lexicons found in database") 1450 + _ -> { 1451 + // Step 3: Create fetchers 1452 + let record_fetcher = fetchers.record_fetcher(db) 1453 + let 
batch_fetcher = fetchers.batch_fetcher(db) 1454 + let paginated_batch_fetcher = fetchers.paginated_batch_fetcher(db) 1455 + let aggregate_fetcher = fetchers.aggregate_fetcher(db) 1456 + let viewer_fetcher = fetchers.viewer_fetcher(db) 1457 + 1458 + // Step 4: Determine local and external collections for backfill 1459 + let collection_ids = 1460 + parsed_lexicons 1461 + |> list.filter_map(fn(lex) { 1462 + case backfill.nsid_matches_domain_authority(lex.id, domain_authority) { 1463 + True -> Ok(lex.id) 1464 + False -> Error(Nil) 1465 + } 1466 + }) 1467 + 1468 + let external_collection_ids = 1469 + parsed_lexicons 1470 + |> list.filter_map(fn(lex) { 1471 + case backfill.nsid_matches_domain_authority(lex.id, domain_authority) { 1472 + True -> Error(Nil) 1473 + False -> Ok(lex.id) 1474 + } 1475 + }) 1476 + 1477 + // Step 5: Create mutation resolver factories 1478 + let mutation_ctx = 1479 + mutations.MutationContext( 1480 + db: db, 1481 + did_cache: did_cache, 1482 + signing_key: signing_key, 1483 + atp_client_id: atp_client_id, 1484 + plc_url: plc_url, 1485 + collection_ids: collection_ids, 1486 + external_collection_ids: external_collection_ids, 1487 + ) 1488 + 1489 + let create_factory = 1490 + option.Some(fn(collection) { 1491 + mutations.create_resolver_factory(collection, mutation_ctx) 1492 + }) 1493 + 1494 + let update_factory = 1495 + option.Some(fn(collection) { 1496 + mutations.update_resolver_factory(collection, mutation_ctx) 1497 + }) 1498 + 1499 + let delete_factory = 1500 + option.Some(fn(collection) { 1501 + mutations.delete_resolver_factory(collection, mutation_ctx) 1502 + }) 1503 + 1504 + let upload_blob_factory = 1505 + option.Some(fn() { 1506 + mutations.upload_blob_resolver_factory(mutation_ctx) 1507 + }) 1508 + 1509 + // Step 6: Build schema with database-backed resolvers, mutations, and subscriptions 1510 + database.build_schema_with_subscriptions( 1511 + parsed_lexicons, 1512 + record_fetcher, 1513 + option.Some(batch_fetcher), 1514 + 
option.Some(paginated_batch_fetcher), 1515 + create_factory, 1516 + update_factory, 1517 + delete_factory, 1518 + upload_blob_factory, 1519 + option.Some(aggregate_fetcher), 1520 + option.Some(viewer_fetcher), 1521 + ) 1522 + } 1523 + } 1524 + } 1525 + 1526 + /// Execute a GraphQL query against lexicons in the database 1527 + /// 1528 + /// This fetches lexicons, builds a schema with database resolvers, 1529 + /// executes the query, and returns the result as JSON. 1530 + pub fn execute_query_with_db( 1531 + db: sqlight.Connection, 1532 + query_string: String, 1533 + variables_json_str: String, 1534 + auth_token: Result(String, Nil), 1535 + did_cache: Subject(did_cache.Message), 1536 + signing_key: option.Option(String), 1537 + atp_client_id: String, 1538 + plc_url: String, 1539 + ) -> Result(String, String) { 1540 + // Get domain authority from database 1541 + let domain_authority = case config_repo.get(db, "domain_authority") { 1542 + Ok(authority) -> authority 1543 + Error(_) -> "" 1544 + } 1545 + 1546 + // Build the schema 1547 + use graphql_schema <- result.try(build_schema_from_db( 1548 + db, 1549 + did_cache, 1550 + signing_key, 1551 + atp_client_id, 1552 + plc_url, 1553 + domain_authority, 1554 + )) 1555 + 1556 + // Create context with auth token if provided 1557 + let ctx_data = case auth_token { 1558 + Ok(token) -> { 1559 + // Add auth token to context for mutation resolvers 1560 + option.Some(value.Object([#("auth_token", value.String(token))])) 1561 + } 1562 + Error(_) -> option.None 1563 + } 1564 + 1565 + // Convert json variables to Dict(String, value.Value) 1566 + let variables_dict = json_string_to_variables_dict(variables_json_str) 1567 + 1568 + let ctx = schema.context_with_variables(ctx_data, variables_dict) 1569 + 1570 + // Execute the query 1571 + use response <- result.try(executor.execute(query_string, graphql_schema, ctx)) 1572 + 1573 + // Format the response as JSON 1574 + Ok(format_response(response)) 1575 + } 1576 + 1577 + /// Format an 
executor.Response as JSON string 1578 + /// Per GraphQL spec, only include "errors" field when there are actual errors 1579 + pub fn format_response(response: executor.Response) -> String { 1580 + let data_json = value_to_json(response.data) 1581 + 1582 + case response.errors { 1583 + [] -> "{\"data\": " <> data_json <> "}" 1584 + errors -> { 1585 + let error_strings = 1586 + list.map(errors, fn(err) { 1587 + let message_json = json.string(err.message) |> json.to_string 1588 + let path_json = json.array(err.path, of: json.string) |> json.to_string 1589 + "{\"message\": " <> message_json <> ", \"path\": " <> path_json <> "}" 1590 + }) 1591 + 1592 + let errors_json = "[" <> string.join(error_strings, ",") <> "]" 1593 + "{\"data\": " <> data_json <> ", \"errors\": " <> errors_json <> "}" 1594 + } 1595 + } 1596 + } 1597 + 1598 + /// Convert JSON string variables to Dict(String, value.Value) 1599 + /// Exported for use by subscription handlers 1600 + pub fn json_string_to_variables_dict( 1601 + json_string: String, 1602 + ) -> dict.Dict(String, value.Value) { 1603 + // First try to extract the "variables" field from the JSON 1604 + let variables_decoder = { 1605 + use vars <- decode.field("variables", decode.dynamic) 1606 + decode.success(vars) 1607 + } 1608 + 1609 + case json.parse(json_string, variables_decoder) { 1610 + Ok(dyn) -> { 1611 + // Convert dynamic to value.Value 1612 + case converters.json_dynamic_to_value(dyn) { 1613 + value.Object(fields) -> dict.from_list(fields) 1614 + _ -> dict.new() 1615 + } 1616 + } 1617 + Error(_) -> dict.new() 1618 + } 1619 + } 1620 + 1621 + /// Re-export parse_json_to_value for WebSocket handler 1622 + pub fn parse_json_to_value(json_str: String) -> Result(value.Value, String) { 1623 + converters.parse_json_to_value(json_str) 1624 + } 1625 + 1626 + // ─── Private Helpers ─────────────────────────────────────────────── 1627 + 1628 + /// Convert a GraphQL value to JSON string 1629 + fn value_to_json(val: value.Value) -> String { 
1630 + case val { 1631 + value.Null -> "null" 1632 + value.Int(i) -> json.int(i) |> json.to_string 1633 + value.Float(f) -> json.float(f) |> json.to_string 1634 + value.String(s) -> json.string(s) |> json.to_string 1635 + value.Boolean(b) -> json.bool(b) |> json.to_string 1636 + value.Enum(e) -> json.string(e) |> json.to_string 1637 + value.List(items) -> { 1638 + let item_jsons = list.map(items, value_to_json) 1639 + "[" <> string.join(item_jsons, ",") <> "]" 1640 + } 1641 + value.Object(fields) -> { 1642 + let field_jsons = 1643 + list.map(fields, fn(field) { 1644 + let #(key, value) = field 1645 + let key_json = json.string(key) |> json.to_string 1646 + let value_json = value_to_json(value) 1647 + key_json <> ": " <> value_json 1648 + }) 1649 + "{" <> string.join(field_jsons, ",") <> "}" 1650 + } 1651 + } 1652 + } 1653 + ``` 1654 + 1655 + **Step 2: Run gleam check to verify syntax** 1656 + 1657 + Run: `cd /Users/chadmiller/code/quickslice/.worktrees/graphql-refactor/server && gleam check` 1658 + Expected: No errors 1659 + 1660 + **Step 3: Commit** 1661 + 1662 + ```bash 1663 + git add src/graphql/lexicon/schema.gleam 1664 + git commit -m "refactor: add lexicon GraphQL schema entry point" 1665 + ``` 1666 + 1667 + --- 1668 + 1669 + ### Task 5: Update handler imports 1670 + 1671 + **Files:** 1672 + - Modify: `server/src/handlers/graphql.gleam:14` 1673 + - Modify: `server/src/handlers/graphql_ws.gleam:13` 1674 + - Modify: `server/src/lib/mcp/tools/graphql.gleam:5` 1675 + 1676 + **Step 1: Update handlers/graphql.gleam** 1677 + 1678 + Change line 14 from: 1679 + ```gleam 1680 + import graphql_gleam 1681 + ``` 1682 + 1683 + To: 1684 + ```gleam 1685 + import graphql/lexicon/schema as lexicon_schema 1686 + ``` 1687 + 1688 + Change line 143 from: 1689 + ```gleam 1690 + graphql_gleam.execute_query_with_db( 1691 + ``` 1692 + 1693 + To: 1694 + ```gleam 1695 + lexicon_schema.execute_query_with_db( 1696 + ``` 1697 + 1698 + **Step 2: Update handlers/graphql_ws.gleam** 1699 + 
1700 + Change line 13 from: 1701 + ```gleam 1702 + import graphql_gleam 1703 + ``` 1704 + 1705 + To: 1706 + ```gleam 1707 + import graphql/lexicon/schema as lexicon_schema 1708 + ``` 1709 + 1710 + Change line 51 (parse_json_to_value call): 1711 + ```gleam 1712 + let value_object = case graphql_gleam.parse_json_to_value(event.value) { 1713 + ``` 1714 + 1715 + To: 1716 + ```gleam 1717 + let value_object = case lexicon_schema.parse_json_to_value(event.value) { 1718 + ``` 1719 + 1720 + Change line 94 (format_response call): 1721 + ```gleam 1722 + Ok(graphql_gleam.format_response(response)) 1723 + ``` 1724 + 1725 + To: 1726 + ```gleam 1727 + Ok(lexicon_schema.format_response(response)) 1728 + ``` 1729 + 1730 + Change line 198 (build_schema_from_db call): 1731 + ```gleam 1732 + graphql_gleam.build_schema_from_db( 1733 + ``` 1734 + 1735 + To: 1736 + ```gleam 1737 + lexicon_schema.build_schema_from_db( 1738 + ``` 1739 + 1740 + Change line 361 (json_string_to_variables_dict call): 1741 + ```gleam 1742 + graphql_gleam.json_string_to_variables_dict(vars_json) 1743 + ``` 1744 + 1745 + To: 1746 + ```gleam 1747 + lexicon_schema.json_string_to_variables_dict(vars_json) 1748 + ``` 1749 + 1750 + **Step 3: Update lib/mcp/tools/graphql.gleam** 1751 + 1752 + Change line 5 from: 1753 + ```gleam 1754 + import graphql_gleam 1755 + ``` 1756 + 1757 + To: 1758 + ```gleam 1759 + import graphql/lexicon/schema as lexicon_schema 1760 + ``` 1761 + 1762 + Change line 18 from: 1763 + ```gleam 1764 + use result_str <- result.try(graphql_gleam.execute_query_with_db( 1765 + ``` 1766 + 1767 + To: 1768 + ```gleam 1769 + use result_str <- result.try(lexicon_schema.execute_query_with_db( 1770 + ``` 1771 + 1772 + **Step 4: Run gleam check to verify all imports are correct** 1773 + 1774 + Run: `cd /Users/chadmiller/code/quickslice/.worktrees/graphql-refactor/server && gleam check` 1775 + Expected: No errors 1776 + 1777 + **Step 5: Commit** 1778 + 1779 + ```bash 1780 + git add src/handlers/graphql.gleam 
src/handlers/graphql_ws.gleam src/lib/mcp/tools/graphql.gleam 1781 + git commit -m "refactor: update handlers to use new lexicon schema module" 1782 + ``` 1783 + 1784 + --- 1785 + 1786 + ### Task 6: Delete old files 1787 + 1788 + **Files:** 1789 + - Delete: `server/src/graphql_gleam.gleam` 1790 + - Delete: `server/src/mutation_resolvers.gleam` 1791 + 1792 + **Step 1: Delete the old files** 1793 + 1794 + ```bash 1795 + rm /Users/chadmiller/code/quickslice/.worktrees/graphql-refactor/server/src/graphql_gleam.gleam 1796 + rm /Users/chadmiller/code/quickslice/.worktrees/graphql-refactor/server/src/mutation_resolvers.gleam 1797 + ``` 1798 + 1799 + **Step 2: Run gleam check to verify no broken imports** 1800 + 1801 + Run: `cd /Users/chadmiller/code/quickslice/.worktrees/graphql-refactor/server && gleam check` 1802 + Expected: No errors 1803 + 1804 + **Step 3: Run gleam build to verify full compilation** 1805 + 1806 + Run: `cd /Users/chadmiller/code/quickslice/.worktrees/graphql-refactor/server && gleam build` 1807 + Expected: Compiles successfully 1808 + 1809 + **Step 4: Commit** 1810 + 1811 + ```bash 1812 + git add -A 1813 + git commit -m "refactor: remove old graphql_gleam.gleam and mutation_resolvers.gleam" 1814 + ``` 1815 + 1816 + --- 1817 + 1818 + ### Task 7: Final verification 1819 + 1820 + **Step 1: Run full test suite** 1821 + 1822 + Run: `cd /Users/chadmiller/code/quickslice/.worktrees/graphql-refactor/server && gleam test` 1823 + Expected: All tests pass 1824 + 1825 + **Step 2: If tests fail, fix any remaining issues** 1826 + 1827 + Common issues to check: 1828 + - Missing imports in test files that referenced `graphql_gleam` directly 1829 + - Type mismatches from module reorganization 1830 + 1831 + **Step 3: Final commit (if any fixes needed)** 1832 + 1833 + If all tests pass and the build works, the refactor is complete. 1834 + 1835 + --- 1836 + 1837 + ## Summary 1838 + 1839 + | Module | Lines (est.) 
| Responsibility | 1840 + |--------|--------------|----------------| 1841 + | converters.gleam | ~150 | Record → GraphQL value conversion, JSON parsing | 1842 + | fetchers.gleam | ~280 | Database fetcher functions for lexicon_graphql | 1843 + | mutations.gleam | ~450 | Mutation resolver factories + auth helper | 1844 + | schema.gleam | ~180 | Public API: build_schema, execute_query, format_response | 1845 + 1846 + **Total:** ~1060 lines across 4 files (down from ~1777, saving ~700 lines via auth helper consolidation) 1847 + 1848 + **Files to update:** 1849 + - `handlers/graphql.gleam` - import change 1850 + - `handlers/graphql_ws.gleam` - import change + 4 function call updates 1851 + - `lib/mcp/tools/graphql.gleam` - import change + function call update 1852 + 1853 + **Files to delete:** 1854 + - `graphql_gleam.gleam` 1855 + - `mutation_resolvers.gleam`
+167
server/src/graphql/lexicon/converters.gleam
··· 1 + /// Value converters for lexicon GraphQL API 2 + /// 3 + /// Transform database records and dynamic values to GraphQL value.Value objects 4 + import database/repositories/actors 5 + import database/types 6 + import gleam/dict 7 + import gleam/dynamic 8 + import gleam/dynamic/decode 9 + import gleam/json 10 + import gleam/list 11 + import sqlight 12 + import swell/value 13 + 14 + /// Convert a database Record to a GraphQL value.Value 15 + /// 16 + /// Creates an Object with all the record metadata plus the parsed JSON value 17 + pub fn record_to_graphql_value( 18 + record: types.Record, 19 + db: sqlight.Connection, 20 + ) -> value.Value { 21 + // Parse the record JSON and convert to GraphQL value 22 + let value_object = case parse_json_to_value(record.json) { 23 + Ok(val) -> val 24 + Error(_) -> value.Object([]) 25 + // Fallback to empty object on parse error 26 + } 27 + 28 + // Look up actor handle from actor table 29 + let actor_handle = case actors.get(db, record.did) { 30 + Ok([actor, ..]) -> value.String(actor.handle) 31 + _ -> value.Null 32 + } 33 + 34 + // Create the full record object with metadata and value 35 + value.Object([ 36 + #("uri", value.String(record.uri)), 37 + #("cid", value.String(record.cid)), 38 + #("did", value.String(record.did)), 39 + #("collection", value.String(record.collection)), 40 + #("indexedAt", value.String(record.indexed_at)), 41 + #("actorHandle", actor_handle), 42 + #("value", value_object), 43 + ]) 44 + } 45 + 46 + /// Parse a JSON string and convert it to a GraphQL value.Value 47 + pub fn parse_json_to_value(json_str: String) -> Result(value.Value, String) { 48 + // Parse JSON string to dynamic value 49 + case json.parse(json_str, decode.dynamic) { 50 + Ok(dyn) -> Ok(dynamic_to_value(dyn)) 51 + Error(_) -> Error("Failed to parse JSON") 52 + } 53 + } 54 + 55 + /// Convert a dynamic value to a GraphQL value.Value 56 + pub fn dynamic_to_value(dyn: dynamic.Dynamic) -> value.Value { 57 + // Try different decoders in order 
58 + case decode.run(dyn, decode.string) { 59 + Ok(s) -> value.String(s) 60 + Error(_) -> 61 + case decode.run(dyn, decode.int) { 62 + Ok(i) -> value.Int(i) 63 + Error(_) -> 64 + case decode.run(dyn, decode.float) { 65 + Ok(f) -> value.Float(f) 66 + Error(_) -> 67 + case decode.run(dyn, decode.bool) { 68 + Ok(b) -> value.Boolean(b) 69 + Error(_) -> 70 + case decode.run(dyn, decode.list(decode.dynamic)) { 71 + Ok(items) -> { 72 + let converted_items = list.map(items, dynamic_to_value) 73 + value.List(converted_items) 74 + } 75 + Error(_) -> 76 + case 77 + decode.run( 78 + dyn, 79 + decode.dict(decode.string, decode.dynamic), 80 + ) 81 + { 82 + Ok(dict) -> { 83 + let fields = 84 + dict 85 + |> dict.to_list 86 + |> list.map(fn(entry) { 87 + let #(key, val) = entry 88 + #(key, dynamic_to_value(val)) 89 + }) 90 + value.Object(fields) 91 + } 92 + Error(_) -> value.Null 93 + } 94 + } 95 + } 96 + } 97 + } 98 + } 99 + } 100 + 101 + /// Convert a dynamic JSON value to graphql value.Value 102 + pub fn json_dynamic_to_value(dyn: dynamic.Dynamic) -> value.Value { 103 + // Try different decoders in order 104 + case decode.run(dyn, decode.string) { 105 + Ok(s) -> value.String(s) 106 + Error(_) -> 107 + case decode.run(dyn, decode.int) { 108 + Ok(i) -> value.Int(i) 109 + Error(_) -> 110 + case decode.run(dyn, decode.float) { 111 + Ok(f) -> value.Float(f) 112 + Error(_) -> 113 + case decode.run(dyn, decode.bool) { 114 + Ok(b) -> value.Boolean(b) 115 + Error(_) -> 116 + // Try as a list 117 + case decode.run(dyn, decode.list(decode.dynamic)) { 118 + Ok(items) -> 119 + value.List(list.map(items, json_dynamic_to_value)) 120 + Error(_) -> 121 + // Try as an object (dict) 122 + case 123 + decode.run( 124 + dyn, 125 + decode.dict(decode.string, decode.dynamic), 126 + ) 127 + { 128 + Ok(d) -> 129 + value.Object( 130 + list.map(dict.to_list(d), fn(pair) { 131 + #(pair.0, json_dynamic_to_value(pair.1)) 132 + }), 133 + ) 134 + Error(_) -> value.Null 135 + } 136 + } 137 + } 138 + } 139 + } 
140 + } 141 + } 142 + 143 + /// Extract a reference URI from a record's JSON 144 + /// This handles both simple string fields (at-uri) and strongRef objects 145 + pub fn extract_reference_uri( 146 + json_str: String, 147 + field_name: String, 148 + ) -> Result(String, Nil) { 149 + // Parse the JSON 150 + case parse_json_to_value(json_str) { 151 + Ok(value.Object(fields)) -> { 152 + // Find the field 153 + case list.key_find(fields, field_name) { 154 + Ok(value.String(uri)) -> Ok(uri) 155 + Ok(value.Object(ref_fields)) -> { 156 + // Handle strongRef: { "uri": "...", "cid": "..." } 157 + case list.key_find(ref_fields, "uri") { 158 + Ok(value.String(uri)) -> Ok(uri) 159 + _ -> Error(Nil) 160 + } 161 + } 162 + _ -> Error(Nil) 163 + } 164 + } 165 + _ -> Error(Nil) 166 + } 167 + }
+350
server/src/graphql/lexicon/fetchers.gleam
/// Database fetchers for lexicon GraphQL API
///
/// These functions bridge the database layer to the lexicon_graphql library's
/// expected fetcher signatures for queries, joins, and aggregations.
import atproto_auth
import database/queries/aggregates
import database/queries/pagination
import database/repositories/actors
import database/repositories/records
import database/types
import gleam/dict
import gleam/list
import gleam/option
import gleam/result
import gleam/string
import graphql/lexicon/converters
import graphql/where_converter
import lexicon_graphql/input/aggregate
import lexicon_graphql/query/dataloader
import lexicon_graphql/schema/database
import sqlight
import swell/value

// ─── Private Helpers ───────────────────────────────────────────────

/// Convert an optional GraphQL where clause to its SQL representation.
///
/// Shared by every fetcher below (previously duplicated three times).
fn convert_where(graphql_where) {
  option.map(graphql_where, where_converter.convert_where_clause)
}

/// Convert database records to GraphQL edges: #(value, cursor) pairs.
///
/// Shared by the record fetcher and both paginated join branches.
fn records_to_edges(record_list, db: sqlight.Connection, sort_by) {
  list.map(record_list, fn(record) {
    #(
      converters.record_to_graphql_value(record, db),
      pagination.generate_cursor_from_record(record, sort_by),
    )
  })
}

/// Fold records into a dict keyed by `key_of(record)`, prepending each
/// converted GraphQL value to the key's list. Records whose key cannot
/// be derived are skipped (matches the original reverse-join behavior).
fn group_records(record_list, db: sqlight.Connection, key_of) {
  list.fold(record_list, dict.new(), fn(acc, record) {
    let graphql_value = converters.record_to_graphql_value(record, db)
    case key_of(record) {
      Ok(key) -> {
        let existing = dict.get(acc, key) |> result.unwrap([])
        dict.insert(acc, key, [graphql_value, ..existing])
      }
      Error(_) -> acc
    }
  })
}

// ─── Public Fetcher Factories ──────────────────────────────────────

/// Create a record fetcher for paginated collection queries
pub fn record_fetcher(db: sqlight.Connection) {
  fn(collection_nsid: String, pagination_params: dataloader.PaginationParams) -> Result(
    #(
      List(#(value.Value, String)),
      option.Option(String),
      Bool,
      Bool,
      option.Option(Int),
    ),
    String,
  ) {
    let where_clause = convert_where(pagination_params.where)

    // Total count for this collection (respecting the where filter);
    // None when the count query fails.
    let total_count =
      records.get_collection_count_with_where(db, collection_nsid, where_clause)
      |> option.from_result

    // Fetch records from database for this collection with pagination
    case
      records.get_by_collection_paginated_with_where(
        db,
        collection_nsid,
        pagination_params.first,
        pagination_params.after,
        pagination_params.last,
        pagination_params.before,
        pagination_params.sort_by,
        where_clause,
      )
    {
      // Deliberate best-effort: return an empty connection on DB error
      // rather than surfacing it to the GraphQL client.
      Error(_) -> Ok(#([], option.None, False, False, option.None))
      Ok(#(record_list, next_cursor, has_next_page, has_previous_page)) ->
        Ok(#(
          records_to_edges(record_list, db, pagination_params.sort_by),
          next_cursor,
          has_next_page,
          has_previous_page,
          total_count,
        ))
    }
  }
}

/// Create a batch fetcher for join operations (forward and reverse)
pub fn batch_fetcher(db: sqlight.Connection) {
  fn(uris: List(String), collection: String, field: option.Option(String)) -> Result(
    dataloader.BatchResult,
    String,
  ) {
    case field {
      // field is None: either a DID join or a forward join by URI,
      // distinguished by inspecting the first key.
      option.None ->
        case uris {
          [] -> Ok(dict.new())
          [first, ..] ->
            case string.starts_with(first, "did:") {
              True ->
                // DID join: fetch records by DID and collection,
                // grouped by DID (multiple records per key).
                case records.get_by_dids_and_collection(db, uris, collection) {
                  Ok(record_list) ->
                    Ok(
                      group_records(record_list, db, fn(record) {
                        Ok(record.did)
                      }),
                    )
                  Error(_) -> Error("Failed to fetch records by DIDs")
                }
              False ->
                // Forward join: fetch records by their URIs; exactly one
                // record per URI, so no accumulation is needed.
                case records.get_by_uris(db, uris) {
                  Ok(record_list) ->
                    Ok(
                      list.fold(record_list, dict.new(), fn(acc, record) {
                        dict.insert(acc, record.uri, [
                          converters.record_to_graphql_value(record, db),
                        ])
                      }),
                    )
                  Error(_) -> Error("Failed to fetch records by URIs")
                }
            }
        }
      // field is Some: reverse join — fetch records that reference the
      // parent URIs, grouped by the parent URI extracted from each
      // record's JSON.
      option.Some(reference_field) ->
        case
          records.get_by_reference_field(db, collection, reference_field, uris)
        {
          Ok(record_list) ->
            Ok(
              group_records(record_list, db, fn(record) {
                converters.extract_reference_uri(record.json, reference_field)
              }),
            )
          Error(_) ->
            Error(
              "Failed to fetch records by reference field: " <> reference_field,
            )
        }
    }
  }
}

/// Create a paginated batch fetcher for join operations with pagination
pub fn paginated_batch_fetcher(db: sqlight.Connection) {
  fn(
    key: String,
    collection: String,
    field: option.Option(String),
    pagination_params: dataloader.PaginationParams,
  ) -> Result(dataloader.PaginatedBatchResult, String) {
    // Convert the where clause from GraphQL to database format
    let db_where = convert_where(pagination_params.where)

    // Shared result handling for both join kinds: convert records to
    // edges and wrap in a PaginatedBatchResult, or map the DB error to
    // the caller-facing message.
    let to_result = fn(fetched, error_message) {
      case fetched {
        Ok(#(
          record_list,
          _next_cursor,
          has_next_page,
          has_previous_page,
          total_count,
        )) ->
          Ok(dataloader.PaginatedBatchResult(
            edges: records_to_edges(record_list, db, pagination_params.sort_by),
            has_next_page: has_next_page,
            has_previous_page: has_previous_page,
            total_count: total_count,
          ))
        Error(_) -> Error(error_message)
      }
    }

    case field {
      // DID join: key is the DID
      option.None ->
        records.get_by_dids_and_collection_paginated(
          db,
          key,
          collection,
          pagination_params.first,
          pagination_params.after,
          pagination_params.last,
          pagination_params.before,
          pagination_params.sort_by,
          db_where,
        )
        |> to_result("Failed to fetch paginated records by DID")
      // Reverse join: key is the parent URI
      option.Some(reference_field) ->
        records.get_by_reference_field_paginated(
          db,
          collection,
          reference_field,
          key,
          pagination_params.first,
          pagination_params.after,
          pagination_params.last,
          pagination_params.before,
          pagination_params.sort_by,
          db_where,
        )
        |> to_result(
          "Failed to fetch paginated records by reference field: "
          <> reference_field,
        )
    }
  }
}

/// Create an aggregate fetcher for GROUP BY queries
pub fn aggregate_fetcher(db: sqlight.Connection) {
  fn(collection_nsid: String, params: database.AggregateParams) {
    let where_clause = convert_where(params.where)

    // Map GraphQL GroupByFieldInput to database GroupByField types;
    // a present interval means a date-truncated field.
    let group_by_fields =
      list.map(params.group_by, fn(gb) {
        case gb.interval {
          option.Some(interval) -> {
            let db_interval = case interval {
              aggregate.Hour -> types.Hour
              aggregate.Day -> types.Day
              aggregate.Week -> types.Week
              aggregate.Month -> types.Month
            }
            types.TruncatedField(gb.field, db_interval)
          }
          option.None -> types.SimpleField(gb.field)
        }
      })

    // Call database aggregation function
    aggregates.get_aggregated_records(
      db,
      collection_nsid,
      group_by_fields,
      where_clause,
      params.order_by_desc,
      params.limit,
    )
    |> result.map_error(fn(_) { "Failed to fetch aggregated records" })
  }
}

/// Create a viewer fetcher for authenticated user info
///
/// Returns #(did, optional handle) for a valid token.
pub fn viewer_fetcher(db: sqlight.Connection) {
  fn(token: String) {
    case atproto_auth.verify_token(db, token) {
      Error(_) -> Error("Invalid or expired token")
      Ok(user_info) -> {
        // Resolve the handle from the actors table; absent is fine.
        let handle = case actors.get(db, user_info.did) {
          Ok([actor, ..]) -> option.Some(actor.handle)
          _ -> option.None
        }
        Ok(#(user_info.did, handle))
      }
    }
  }
}
+757
server/src/graphql/lexicon/mutations.gleam
/// Mutation Resolvers for lexicon GraphQL API
///
/// Implements GraphQL mutation resolvers with AT Protocol integration.
/// These resolvers handle authentication, validation, and database operations.
import actor_validator
import atproto_auth
import backfill
import database/repositories/lexicons
import database/repositories/records
import dpop
import gleam/dict
import gleam/dynamic
import gleam/dynamic/decode
import gleam/erlang/process.{type Subject}
import gleam/int
import gleam/json
import gleam/list
import gleam/option
import gleam/result
import honk
import honk/errors
import lib/oauth/did_cache
import sqlight
import swell/schema
import swell/value

/// Context for mutation execution
pub type MutationContext {
  MutationContext(
    db: sqlight.Connection,
    did_cache: Subject(did_cache.Message),
    signing_key: option.Option(String),
    atp_client_id: String,
    plc_url: String,
    collection_ids: List(String),
    external_collection_ids: List(String),
  )
}

// ─── Private Auth Helper ─────────────────────────────────────────────

/// Authenticated session info returned by auth helper
type AuthenticatedSession {
  AuthenticatedSession(
    user_info: atproto_auth.UserInfo,
    session: atproto_auth.AtprotoSession,
  )
}

/// Extract token, verify auth, ensure actor exists, get ATP session
///
/// Shared by all four resolver factories so the ~40-line auth sequence
/// exists in exactly one place.
fn get_authenticated_session(
  resolver_ctx: schema.Context,
  ctx: MutationContext,
) -> Result(AuthenticatedSession, String) {
  // Step 1: Extract auth token from context data
  let token = case resolver_ctx.data {
    option.Some(value.Object(fields)) -> {
      case list.key_find(fields, "auth_token") {
        Ok(value.String(t)) -> Ok(t)
        Ok(_) -> Error("auth_token must be a string")
        Error(_) ->
          Error("Authentication required. Please provide Authorization header.")
      }
    }
    _ -> Error("Authentication required. Please provide Authorization header.")
  }

  use token <- result.try(token)

  // Step 2: Verify OAuth token
  use user_info <- result.try(
    atproto_auth.verify_token(ctx.db, token)
    |> result.map_error(fn(err) {
      case err {
        atproto_auth.UnauthorizedToken -> "Unauthorized"
        atproto_auth.TokenExpired -> "Token expired"
        atproto_auth.MissingAuthHeader -> "Missing authentication"
        atproto_auth.InvalidAuthHeader -> "Invalid authentication header"
        _ -> "Authentication error"
      }
    }),
  )

  // Step 3: Ensure actor exists in database
  use is_new_actor <- result.try(actor_validator.ensure_actor_exists(
    ctx.db,
    user_info.did,
    ctx.plc_url,
  ))

  // If new actor, spawn backfill for all collections (fire-and-forget;
  // the mutation does not wait on backfill completion)
  case is_new_actor {
    True -> {
      process.spawn_unlinked(fn() {
        backfill.backfill_collections_for_actor(
          ctx.db,
          user_info.did,
          ctx.collection_ids,
          ctx.external_collection_ids,
          ctx.plc_url,
        )
      })
      Nil
    }
    False -> Nil
  }

  // Step 4: Get AT Protocol session
  use session <- result.try(
    atproto_auth.get_atp_session(
      ctx.db,
      ctx.did_cache,
      token,
      ctx.signing_key,
      ctx.atp_client_id,
    )
    |> result.map_error(fn(err) {
      case err {
        atproto_auth.SessionNotFound -> "Session not found"
        atproto_auth.SessionNotReady -> "Session not ready"
        atproto_auth.RefreshFailed(msg) -> "Token refresh failed: " <> msg
        atproto_auth.DIDResolutionFailed(msg) ->
          "DID resolution failed: " <> msg
        _ -> "Failed to get ATP session"
      }
    }),
  )

  Ok(AuthenticatedSession(user_info: user_info, session: session))
}

// ─── Private Blob Helpers ────────────────────────────────────────────

/// Convert GraphQL value to JSON value (not string)
fn graphql_value_to_json_value(val: value.Value) -> json.Json {
  case val {
    value.String(s) -> json.string(s)
    value.Int(i) -> json.int(i)
    value.Float(f) -> json.float(f)
    value.Boolean(b) -> json.bool(b)
    value.Null -> json.null()
    value.Enum(e) -> json.string(e)
    value.List(items) -> json.array(items, graphql_value_to_json_value)
    value.Object(fields) -> {
      json.object(
        fields
        |> list.map(fn(field) {
          let #(key, val) = field
          #(key, graphql_value_to_json_value(val))
        }),
      )
    }
  }
}

/// Get blob field paths from a lexicon for a given collection
///
/// Returns an empty list when the collection's lexicon is missing or
/// its record properties cannot be decoded.
fn get_blob_paths(
  collection: String,
  lexicons: List(json.Json),
) -> List(List(String)) {
  let lexicon =
    list.find(lexicons, fn(lex) {
      case json.parse(json.to_string(lex), decode.at(["id"], decode.string)) {
        Ok(id) -> id == collection
        Error(_) -> False
      }
    })

  case lexicon {
    Ok(lex) -> {
      let properties_decoder =
        decode.at(
          ["defs", "main", "record", "properties"],
          decode.dict(decode.string, decode.dynamic),
        )
      case json.parse(json.to_string(lex), properties_decoder) {
        Ok(properties) -> extract_blob_paths_from_properties(properties, [])
        Error(_) -> []
      }
    }
    Error(_) -> []
  }
}

/// Recursively extract blob paths from lexicon properties
///
/// Recurses into "object" properties and "array" items; any field (or
/// array item) typed "blob" contributes its path.
fn extract_blob_paths_from_properties(
  properties: dict.Dict(String, dynamic.Dynamic),
  current_path: List(String),
) -> List(List(String)) {
  dict.fold(properties, [], fn(acc, field_name, field_def) {
    let field_path = list.append(current_path, [field_name])
    let type_result = decode.run(field_def, decode.at(["type"], decode.string))

    case type_result {
      Ok("blob") -> [field_path, ..acc]
      Ok("object") -> {
        let nested_props_result =
          decode.run(
            field_def,
            decode.at(
              ["properties"],
              decode.dict(decode.string, decode.dynamic),
            ),
          )
        case nested_props_result {
          Ok(nested_props) -> {
            let nested_paths =
              extract_blob_paths_from_properties(nested_props, field_path)
            list.append(nested_paths, acc)
          }
          Error(_) -> acc
        }
      }
      Ok("array") -> {
        let items_type_result =
          decode.run(field_def, decode.at(["items", "type"], decode.string))
        case items_type_result {
          Ok("blob") -> [field_path, ..acc]
          Ok("object") -> {
            let item_props_result =
              decode.run(
                field_def,
                decode.at(
                  ["items", "properties"],
                  decode.dict(decode.string, decode.dynamic),
                ),
              )
            case item_props_result {
              Ok(item_props) -> {
                let nested_paths =
                  extract_blob_paths_from_properties(item_props, field_path)
                list.append(nested_paths, acc)
              }
              Error(_) -> acc
            }
          }
          _ -> acc
        }
      }
      _ -> acc
    }
  })
}

/// Transform blob inputs in a value from GraphQL format to AT Protocol format
fn transform_blob_inputs(
  input: value.Value,
  blob_paths: List(List(String)),
) -> value.Value {
  transform_value_at_paths(input, blob_paths, [])
}

/// Recursively transform values at blob paths
fn transform_value_at_paths(
  val: value.Value,
  blob_paths: List(List(String)),
  current_path: List(String),
) -> value.Value {
  case val {
    value.Object(fields) -> {
      // The root object ([] path) is never itself a blob.
      let is_blob_path =
        list.any(blob_paths, fn(path) {
          path == current_path && current_path != []
        })

      case is_blob_path {
        True -> transform_blob_object(fields)
        False -> {
          value.Object(
            list.map(fields, fn(field) {
              let #(key, field_val) = field
              let new_path = list.append(current_path, [key])
              #(key, transform_value_at_paths(field_val, blob_paths, new_path))
            }),
          )
        }
      }
    }
    value.List(items) -> {
      let is_blob_array_path =
        list.any(blob_paths, fn(path) {
          path == current_path && current_path != []
        })

      case is_blob_array_path {
        // Array of blobs: transform each object item in place.
        True -> {
          value.List(
            list.map(items, fn(item) {
              case item {
                value.Object(item_fields) -> transform_blob_object(item_fields)
                _ -> item
              }
            }),
          )
        }
        False -> {
          // Only recurse into the list if some blob path passes through
          // this position; otherwise leave the value untouched.
          let paths_through_here =
            list.filter(blob_paths, fn(path) {
              list.length(path) > list.length(current_path)
              && list.take(path, list.length(current_path)) == current_path
            })

          case list.is_empty(paths_through_here) {
            True -> val
            False -> {
              value.List(
                list.map(items, fn(item) {
                  transform_value_at_paths(item, blob_paths, current_path)
                }),
              )
            }
          }
        }
      }
    }
    _ -> val
  }
}

/// Transform a BlobInput object to AT Protocol blob format
///
/// Leaves the object untouched when ref or mimeType is missing/empty.
fn transform_blob_object(fields: List(#(String, value.Value))) -> value.Value {
  let ref = case list.key_find(fields, "ref") {
    Ok(value.String(r)) -> r
    _ -> ""
  }
  let mime_type = case list.key_find(fields, "mimeType") {
    Ok(value.String(m)) -> m
    _ -> ""
  }
  let size = case list.key_find(fields, "size") {
    Ok(value.Int(s)) -> s
    _ -> 0
  }

  case ref != "" && mime_type != "" {
    True ->
      value.Object([
        #("$type", value.String("blob")),
        #("ref", value.Object([#("$link", value.String(ref))])),
        #("mimeType", value.String(mime_type)),
        #("size", value.Int(size)),
      ])
    False -> value.Object(fields)
  }
}

/// Decode base64 string to bit array
///
/// NOTE(review): Erlang's base64:decode/1 raises on invalid input rather
/// than returning an error, so the Error branch of this Result is
/// unreachable — invalid base64 will crash the calling process instead
/// of surfacing "Failed to decode base64 data". A safe fix needs an
/// exception-catching FFI wrapper; confirm desired behavior.
fn decode_base64(base64_str: String) -> Result(BitArray, Nil) {
  Ok(do_erlang_base64_decode(base64_str))
}

/// Extract blob fields from dynamic PDS response
fn extract_blob_from_dynamic(
  blob_dynamic: dynamic.Dynamic,
  did: String,
) -> Result(value.Value, String) {
  let ref_link_decoder = {
    use link <- decode.field("$link", decode.string)
    decode.success(link)
  }

  let full_decoder = {
    use mime_type <- decode.field("mimeType", decode.string)
    use size <- decode.field("size", decode.int)
    use ref <- decode.field("ref", ref_link_decoder)
    decode.success(#(ref, mime_type, size))
  }

  use #(ref, mime_type, size) <- result.try(
    decode.run(blob_dynamic, full_decoder)
    |> result.map_error(fn(_) { "Failed to decode blob fields" }),
  )

  Ok(
    value.Object([
      #("ref", value.String(ref)),
      #("mime_type", value.String(mime_type)),
      #("size", value.Int(size)),
      #("did", value.String(did)),
    ]),
  )
}

/// Erlang FFI: base64:decode/1 returns BitArray directly (not Result)
@external(erlang, "base64", "decode")
fn do_erlang_base64_decode(a: String) -> BitArray

// ─── Private Write Helpers ───────────────────────────────────────────

/// Fetch and parse all lexicons, transform blob inputs, serialize, and
/// validate the record against its lexicon.
///
/// Shared by the create and update resolvers (previously ~45 duplicated
/// lines each). Returns #(record JSON value, record JSON string).
fn validate_and_serialize_input(
  db: sqlight.Connection,
  collection: String,
  input: value.Value,
) -> Result(#(json.Json, String), String) {
  // Fetch lexicons for validation and blob path extraction
  use all_lexicon_records <- result.try(
    lexicons.get_all(db)
    |> result.map_error(fn(_) { "Failed to fetch lexicons" }),
  )

  use all_lex_jsons <- result.try(
    all_lexicon_records
    |> list.try_map(fn(lex) {
      honk.parse_json_string(lex.json)
      |> result.map_error(fn(e) {
        "Failed to parse lexicon JSON: " <> errors.to_string(e)
      })
    }),
  )

  // Transform blob inputs from GraphQL format to AT Protocol format
  let blob_paths = get_blob_paths(collection, all_lex_jsons)
  let transformed_input = transform_blob_inputs(input, blob_paths)
  let record_json_value = graphql_value_to_json_value(transformed_input)
  let record_json_string = json.to_string(record_json_value)

  // Validate against lexicon
  use _ <- result.try(
    honk.validate_record(all_lex_jsons, collection, record_json_value)
    |> result.map_error(fn(err) {
      "Validation failed: " <> errors.to_string(err)
    }),
  )

  Ok(#(record_json_value, record_json_string))
}

/// Parse the #(uri, cid) pair out of a PDS write response.
///
/// Shared by the create and update resolvers (previously duplicated).
fn parse_uri_cid(status: Int, body: String) -> Result(#(String, String), String) {
  case status {
    200 | 201 -> {
      let response_decoder = {
        use uri <- decode.field("uri", decode.string)
        use cid <- decode.field("cid", decode.string)
        decode.success(#(uri, cid))
      }
      json.parse(body, response_decoder)
      |> result.map_error(fn(_) {
        "Failed to parse PDS success response. Body: " <> body
      })
    }
    _ ->
      Error(
        "PDS request failed with status "
        <> int.to_string(status)
        <> ": "
        <> body,
      )
  }
}

// ─── Public Resolver Factories ───────────────────────────────────────

/// Create a resolver factory for create mutations
pub fn create_resolver_factory(
  collection: String,
  ctx: MutationContext,
) -> schema.Resolver {
  fn(resolver_ctx: schema.Context) -> Result(value.Value, String) {
    // Get authenticated session using helper
    use auth <- result.try(get_authenticated_session(resolver_ctx, ctx))

    // Get input (required) and rkey (optional) from arguments
    let input_result = case schema.get_argument(resolver_ctx, "input") {
      option.Some(val) -> Ok(val)
      option.None -> Error("Missing required argument: input")
    }

    use input <- result.try(input_result)

    let rkey = case schema.get_argument(resolver_ctx, "rkey") {
      option.Some(value.String(r)) -> option.Some(r)
      _ -> option.None
    }

    // Transform blobs, serialize, and validate against the lexicon
    use #(record_json_value, record_json_string) <- result.try(
      validate_and_serialize_input(ctx.db, collection, input),
    )

    // Call createRecord via AT Protocol; rkey is included only when given
    let create_body =
      case rkey {
        option.Some(r) ->
          json.object([
            #("repo", json.string(auth.user_info.did)),
            #("collection", json.string(collection)),
            #("rkey", json.string(r)),
            #("record", record_json_value),
          ])
        option.None ->
          json.object([
            #("repo", json.string(auth.user_info.did)),
            #("collection", json.string(collection)),
            #("record", record_json_value),
          ])
      }
      |> json.to_string

    let pds_url =
      auth.session.pds_endpoint <> "/xrpc/com.atproto.repo.createRecord"

    use response <- result.try(
      dpop.make_dpop_request("POST", pds_url, auth.session, create_body)
      |> result.map_error(fn(_) { "Failed to create record on PDS" }),
    )

    use #(uri, cid) <- result.try(parse_uri_cid(response.status, response.body))

    // Index the created record in the database
    use _ <- result.try(
      records.insert(
        ctx.db,
        uri,
        cid,
        auth.user_info.did,
        collection,
        record_json_string,
      )
      |> result.map_error(fn(_) { "Failed to index record in database" }),
    )

    Ok(
      value.Object([
        #("uri", value.String(uri)),
        #("cid", value.String(cid)),
        #("did", value.String(auth.user_info.did)),
        #("collection", value.String(collection)),
        #("indexedAt", value.String("")),
        #("value", input),
      ]),
    )
  }
}

/// Create a resolver factory for update mutations
pub fn update_resolver_factory(
  collection: String,
  ctx: MutationContext,
) -> schema.Resolver {
  fn(resolver_ctx: schema.Context) -> Result(value.Value, String) {
    // Get authenticated session using helper
    use auth <- result.try(get_authenticated_session(resolver_ctx, ctx))

    // Get rkey (required) and input (required) from arguments
    let rkey_result = case schema.get_argument(resolver_ctx, "rkey") {
      option.Some(value.String(r)) -> Ok(r)
      option.Some(_) -> Error("rkey must be a string")
      option.None -> Error("Missing required argument: rkey")
    }

    use rkey <- result.try(rkey_result)

    let input_result = case schema.get_argument(resolver_ctx, "input") {
      option.Some(val) -> Ok(val)
      option.None -> Error("Missing required argument: input")
    }

    use input <- result.try(input_result)

    // Transform blobs, serialize, and validate against the lexicon
    use #(record_json_value, record_json_string) <- result.try(
      validate_and_serialize_input(ctx.db, collection, input),
    )

    // Call putRecord via AT Protocol
    let update_body =
      json.object([
        #("repo", json.string(auth.user_info.did)),
        #("collection", json.string(collection)),
        #("rkey", json.string(rkey)),
        #("record", record_json_value),
      ])
      |> json.to_string

    let pds_url =
      auth.session.pds_endpoint <> "/xrpc/com.atproto.repo.putRecord"

    use response <- result.try(
      dpop.make_dpop_request("POST", pds_url, auth.session, update_body)
      |> result.map_error(fn(_) { "Failed to update record on PDS" }),
    )

    use #(uri, cid) <- result.try(parse_uri_cid(response.status, response.body))

    // Update the record in the database
    use _ <- result.try(
      records.update(ctx.db, uri, cid, record_json_string)
      |> result.map_error(fn(_) { "Failed to update record in database" }),
    )

    Ok(
      value.Object([
        #("uri", value.String(uri)),
        #("cid", value.String(cid)),
        #("did", value.String(auth.user_info.did)),
        #("collection", value.String(collection)),
        #("indexedAt", value.String("")),
        #("value", input),
      ]),
    )
  }
}

/// Create a resolver factory for delete mutations
pub fn delete_resolver_factory(
  collection: String,
  ctx: MutationContext,
) -> schema.Resolver {
  fn(resolver_ctx: schema.Context) -> Result(value.Value, String) {
    // Get authenticated session using helper
    use auth <- result.try(get_authenticated_session(resolver_ctx, ctx))

    // Get rkey (required) from arguments
    let rkey_result = case schema.get_argument(resolver_ctx, "rkey") {
      option.Some(value.String(r)) -> Ok(r)
      option.Some(_) -> Error("rkey must be a string")
      option.None -> Error("Missing required argument: rkey")
    }

    use rkey <- result.try(rkey_result)

    // Build the record URI to be deleted
    let uri = "at://" <> auth.user_info.did <> "/" <> collection <> "/" <> rkey

    // Call deleteRecord via AT Protocol
    let delete_body =
      json.object([
        #("repo", json.string(auth.user_info.did)),
        #("collection", json.string(collection)),
        #("rkey", json.string(rkey)),
      ])
      |> json.to_string

    let pds_url =
      auth.session.pds_endpoint <> "/xrpc/com.atproto.repo.deleteRecord"

    use response <- result.try(
      dpop.make_dpop_request("POST", pds_url, auth.session, delete_body)
      |> result.map_error(fn(_) { "Failed to delete record on PDS" }),
    )

    // Deletes additionally accept 204 No Content
    use _ <- result.try(case response.status {
      200 | 201 | 204 -> Ok(Nil)
      _ ->
        Error(
          "PDS delete request failed with status "
          <> int.to_string(response.status)
          <> ": "
          <> response.body,
        )
    })

    // Delete the record from the database
    use _ <- result.try(
      records.delete(ctx.db, uri)
      |> result.map_error(fn(_) { "Failed to delete record from database" }),
    )

    Ok(value.Object([#("uri", value.String(uri))]))
  }
}

/// Create a resolver for uploadBlob mutation
pub fn upload_blob_resolver_factory(ctx: MutationContext) -> schema.Resolver {
  fn(resolver_ctx: schema.Context) -> Result(value.Value, String) {
    // Get authenticated session using helper
    use auth <- result.try(get_authenticated_session(resolver_ctx, ctx))

    // Get data (base64) and mimeType from arguments
    let data_result = case schema.get_argument(resolver_ctx, "data") {
      option.Some(value.String(d)) -> Ok(d)
      option.Some(_) -> Error("data must be a string")
      option.None -> Error("Missing required argument: data")
    }

    use data_base64 <- result.try(data_result)

    let mime_type_result = case schema.get_argument(resolver_ctx, "mimeType") {
      option.Some(value.String(m)) -> Ok(m)
      option.Some(_) -> Error("mimeType must be a string")
      option.None -> Error("Missing required argument: mimeType")
    }

    use mime_type <- result.try(mime_type_result)

    // Decode base64 data to binary
    use binary_data <- result.try(
      decode_base64(data_base64)
      |> result.map_error(fn(_) { "Failed to decode base64 data" }),
    )

    // Upload blob to PDS
    let pds_url =
      auth.session.pds_endpoint <> "/xrpc/com.atproto.repo.uploadBlob"

    use response <- result.try(
      dpop.make_dpop_request_with_binary(
        "POST",
        pds_url,
        auth.session,
        binary_data,
        mime_type,
      )
      |> result.map_error(fn(_) { "Failed to upload blob to PDS" }),
    )

    // Extract the blob reference from the PDS response
    use blob_ref <- result.try(case response.status {
      200 | 201 -> {
        let response_decoder = {
          use blob <- decode.field("blob", decode.dynamic)
          decode.success(blob)
        }

        case json.parse(response.body, response_decoder) {
          Ok(blob_dynamic) ->
            extract_blob_from_dynamic(blob_dynamic, auth.user_info.did)
          Error(_) ->
            Error("Failed to parse PDS response. Body: " <> response.body)
        }
      }
      _ ->
        Error(
          "PDS request failed with status "
          <> int.to_string(response.status)
          <> ": "
          <> response.body,
        )
    })

    Ok(blob_ref)
  }
}
+265
server/src/graphql/lexicon/schema.gleam
/// Lexicon GraphQL schema entry point
///
/// Public API for building and executing the lexicon-driven GraphQL schema.
/// External code should import this module for all lexicon GraphQL operations.
import backfill
import database/repositories/config as config_repo
import database/repositories/lexicons
import gleam/dict
import gleam/dynamic/decode
import gleam/erlang/process.{type Subject}
import gleam/json
import gleam/list
import gleam/option
import gleam/result
import gleam/string
import graphql/lexicon/converters
import graphql/lexicon/fetchers
import graphql/lexicon/mutations
import lexicon_graphql
import lexicon_graphql/schema/database
import lib/oauth/did_cache
import sqlight
import swell/executor
import swell/schema
import swell/value

/// Build a GraphQL schema from database lexicons
///
/// This is exposed for WebSocket subscriptions to build the schema once
/// and reuse it for multiple subscription executions.
pub fn build_schema_from_db(
  db: sqlight.Connection,
  did_cache: Subject(did_cache.Message),
  signing_key: option.Option(String),
  atp_client_id: String,
  plc_url: String,
  domain_authority: String,
) -> Result(schema.Schema, String) {
  // Step 1: Fetch lexicons from database
  use lexicon_records <- result.try(
    lexicons.get_all(db)
    |> result.map_error(fn(_) { "Failed to fetch lexicons from database" }),
  )

  // Step 2: Parse lexicon JSON into structured Lexicon types,
  // silently dropping any that fail to parse
  let parsed_lexicons =
    lexicon_records
    |> list.filter_map(fn(lex) {
      lexicon_graphql.parse_lexicon(lex.json)
      |> result.replace_error(Nil)
    })

  // Check if we got any valid lexicons
  case parsed_lexicons {
    [] -> Error("No valid lexicons found in database")
    _ -> {
      // Step 3: Create fetchers
      let record_fetcher = fetchers.record_fetcher(db)
      let batch_fetcher = fetchers.batch_fetcher(db)
      let paginated_batch_fetcher = fetchers.paginated_batch_fetcher(db)
      let aggregate_fetcher = fetchers.aggregate_fetcher(db)
      let viewer_fetcher = fetchers.viewer_fetcher(db)

      // Step 4: Split collections into local (NSID under our domain
      // authority) and external, for backfill — one pass instead of
      // running the same predicate twice over the whole list
      let #(local_lexicons, external_lexicons) =
        list.partition(parsed_lexicons, fn(lex) {
          backfill.nsid_matches_domain_authority(lex.id, domain_authority)
        })
      let collection_ids = list.map(local_lexicons, fn(lex) { lex.id })
      let external_collection_ids =
        list.map(external_lexicons, fn(lex) { lex.id })

      // Step 5: Create mutation resolver factories
      let mutation_ctx =
        mutations.MutationContext(
          db: db,
          did_cache: did_cache,
          signing_key: signing_key,
          atp_client_id: atp_client_id,
          plc_url: plc_url,
          collection_ids: collection_ids,
          external_collection_ids: external_collection_ids,
        )

      let create_factory =
        option.Some(fn(collection) {
          mutations.create_resolver_factory(collection, mutation_ctx)
        })

      let update_factory =
        option.Some(fn(collection) {
          mutations.update_resolver_factory(collection, mutation_ctx)
        })

      let delete_factory =
        option.Some(fn(collection) {
          mutations.delete_resolver_factory(collection, mutation_ctx)
        })

      let upload_blob_factory =
        option.Some(fn() { mutations.upload_blob_resolver_factory(mutation_ctx) })

      // Step 6: Build schema with database-backed resolvers, mutations, and subscriptions
      database.build_schema_with_subscriptions(
        parsed_lexicons,
        record_fetcher,
        option.Some(batch_fetcher),
        option.Some(paginated_batch_fetcher),
        create_factory,
        update_factory,
        delete_factory,
        upload_blob_factory,
        option.Some(aggregate_fetcher),
        option.Some(viewer_fetcher),
      )
    }
  }
}

/// Execute a GraphQL query against lexicons in the database
///
/// This fetches lexicons, builds a schema with database resolvers,
/// executes the query, and returns the result as JSON.
142 + pub fn execute_query_with_db( 143 + db: sqlight.Connection, 144 + query_string: String, 145 + variables_json_str: String, 146 + auth_token: Result(String, Nil), 147 + did_cache: Subject(did_cache.Message), 148 + signing_key: option.Option(String), 149 + atp_client_id: String, 150 + plc_url: String, 151 + ) -> Result(String, String) { 152 + // Get domain authority from database 153 + let domain_authority = case config_repo.get(db, "domain_authority") { 154 + Ok(authority) -> authority 155 + Error(_) -> "" 156 + } 157 + 158 + // Build the schema 159 + use graphql_schema <- result.try(build_schema_from_db( 160 + db, 161 + did_cache, 162 + signing_key, 163 + atp_client_id, 164 + plc_url, 165 + domain_authority, 166 + )) 167 + 168 + // Create context with auth token if provided 169 + let ctx_data = case auth_token { 170 + Ok(token) -> { 171 + // Add auth token to context for mutation resolvers 172 + option.Some(value.Object([#("auth_token", value.String(token))])) 173 + } 174 + Error(_) -> option.None 175 + } 176 + 177 + // Convert json variables to Dict(String, value.Value) 178 + let variables_dict = json_string_to_variables_dict(variables_json_str) 179 + 180 + let ctx = schema.context_with_variables(ctx_data, variables_dict) 181 + 182 + // Execute the query 183 + use response <- result.try(executor.execute(query_string, graphql_schema, ctx)) 184 + 185 + // Format the response as JSON 186 + Ok(format_response(response)) 187 + } 188 + 189 + /// Format an executor.Response as JSON string 190 + /// Per GraphQL spec, only include "errors" field when there are actual errors 191 + pub fn format_response(response: executor.Response) -> String { 192 + let data_json = value_to_json(response.data) 193 + 194 + case response.errors { 195 + [] -> "{\"data\": " <> data_json <> "}" 196 + errors -> { 197 + let error_strings = 198 + list.map(errors, fn(err) { 199 + let message_json = json.string(err.message) |> json.to_string 200 + let path_json = 201 + json.array(err.path, of: 
json.string) |> json.to_string 202 + "{\"message\": " <> message_json <> ", \"path\": " <> path_json <> "}" 203 + }) 204 + 205 + let errors_json = "[" <> string.join(error_strings, ",") <> "]" 206 + "{\"data\": " <> data_json <> ", \"errors\": " <> errors_json <> "}" 207 + } 208 + } 209 + } 210 + 211 + /// Convert JSON string variables to Dict(String, value.Value) 212 + /// Exported for use by subscription handlers 213 + pub fn json_string_to_variables_dict( 214 + json_string: String, 215 + ) -> dict.Dict(String, value.Value) { 216 + // First try to extract the "variables" field from the JSON 217 + let variables_decoder = { 218 + use vars <- decode.field("variables", decode.dynamic) 219 + decode.success(vars) 220 + } 221 + 222 + case json.parse(json_string, variables_decoder) { 223 + Ok(dyn) -> { 224 + // Convert dynamic to value.Value 225 + case converters.json_dynamic_to_value(dyn) { 226 + value.Object(fields) -> dict.from_list(fields) 227 + _ -> dict.new() 228 + } 229 + } 230 + Error(_) -> dict.new() 231 + } 232 + } 233 + 234 + /// Re-export parse_json_to_value for WebSocket handler 235 + pub fn parse_json_to_value(json_str: String) -> Result(value.Value, String) { 236 + converters.parse_json_to_value(json_str) 237 + } 238 + 239 + // ─── Private Helpers ─────────────────────────────────────────────── 240 + 241 + /// Convert a GraphQL value to JSON string 242 + fn value_to_json(val: value.Value) -> String { 243 + case val { 244 + value.Null -> "null" 245 + value.Int(i) -> json.int(i) |> json.to_string 246 + value.Float(f) -> json.float(f) |> json.to_string 247 + value.String(s) -> json.string(s) |> json.to_string 248 + value.Boolean(b) -> json.bool(b) |> json.to_string 249 + value.Enum(e) -> json.string(e) |> json.to_string 250 + value.List(items) -> { 251 + let item_jsons = list.map(items, value_to_json) 252 + "[" <> string.join(item_jsons, ",") <> "]" 253 + } 254 + value.Object(fields) -> { 255 + let field_jsons = 256 + list.map(fields, fn(field) { 257 + let 
#(key, v) = field 258 + let key_json = json.string(key) |> json.to_string 259 + let value_json = value_to_json(v) 260 + key_json <> ": " <> value_json 261 + }) 262 + "{" <> string.join(field_jsons, ",") <> "}" 263 + } 264 + } 265 + }
-753
server/src/graphql_gleam.gleam
··· 1 - /// Pure Gleam GraphQL Implementation 2 - /// 3 - /// This module provides GraphQL schema building and query execution 4 - import atproto_auth 5 - import backfill 6 - import database/queries/aggregates 7 - import database/queries/pagination 8 - import database/repositories/actors 9 - import database/repositories/config as config_repo 10 - import database/repositories/lexicons 11 - import database/repositories/records 12 - import database/types 13 - import gleam/dict 14 - import gleam/dynamic 15 - import gleam/dynamic/decode 16 - import gleam/erlang/process.{type Subject} 17 - import gleam/json 18 - import gleam/list 19 - import gleam/option 20 - import gleam/result 21 - import gleam/string 22 - import graphql/where_converter 23 - import lexicon_graphql 24 - import lexicon_graphql/input/aggregate 25 - import lexicon_graphql/query/dataloader 26 - import lexicon_graphql/schema/database 27 - import lib/oauth/did_cache 28 - import mutation_resolvers 29 - import sqlight 30 - import swell/executor 31 - import swell/schema 32 - import swell/value 33 - 34 - /// Build a GraphQL schema from database lexicons 35 - /// 36 - /// This is exposed for WebSocket subscriptions to build the schema once 37 - /// and reuse it for multiple subscription executions. 
38 - pub fn build_schema_from_db( 39 - db: sqlight.Connection, 40 - did_cache: Subject(did_cache.Message), 41 - signing_key: option.Option(String), 42 - atp_client_id: String, 43 - plc_url: String, 44 - domain_authority: String, 45 - ) -> Result(schema.Schema, String) { 46 - // Step 1: Fetch lexicons from database 47 - use lexicon_records <- result.try( 48 - lexicons.get_all(db) 49 - |> result.map_error(fn(_) { "Failed to fetch lexicons from database" }), 50 - ) 51 - 52 - // Step 2: Parse lexicon JSON into structured Lexicon types 53 - let parsed_lexicons = 54 - lexicon_records 55 - |> list.filter_map(fn(lex) { 56 - case lexicon_graphql.parse_lexicon(lex.json) { 57 - Ok(parsed) -> Ok(parsed) 58 - Error(_) -> Error(Nil) 59 - } 60 - }) 61 - 62 - // Check if we got any valid lexicons 63 - case parsed_lexicons { 64 - [] -> Error("No valid lexicons found in database") 65 - _ -> { 66 - // Step 3: Create a record fetcher function that queries the database with pagination 67 - let record_fetcher = fn( 68 - collection_nsid: String, 69 - pagination_params: dataloader.PaginationParams, 70 - ) -> Result( 71 - #( 72 - List(#(value.Value, String)), 73 - option.Option(String), 74 - Bool, 75 - Bool, 76 - option.Option(Int), 77 - ), 78 - String, 79 - ) { 80 - // Convert where clause from GraphQL types to SQL types 81 - let where_clause = case pagination_params.where { 82 - option.Some(graphql_where) -> 83 - option.Some(where_converter.convert_where_clause(graphql_where)) 84 - option.None -> option.None 85 - } 86 - 87 - // Get total count for this collection (with where filter if present) 88 - let total_count = 89 - records.get_collection_count_with_where( 90 - db, 91 - collection_nsid, 92 - where_clause, 93 - ) 94 - |> result.map(option.Some) 95 - |> result.unwrap(option.None) 96 - 97 - // Fetch records from database for this collection with pagination 98 - case 99 - records.get_by_collection_paginated_with_where( 100 - db, 101 - collection_nsid, 102 - pagination_params.first, 103 
- pagination_params.after, 104 - pagination_params.last, 105 - pagination_params.before, 106 - pagination_params.sort_by, 107 - where_clause, 108 - ) 109 - { 110 - Error(_) -> Ok(#([], option.None, False, False, option.None)) 111 - // Return empty result on error 112 - Ok(#(record_list, next_cursor, has_next_page, has_previous_page)) -> { 113 - // Convert database records to GraphQL values with cursors 114 - let graphql_records_with_cursors = 115 - list.map(record_list, fn(record) { 116 - let graphql_value = record_to_graphql_value(record, db) 117 - // Generate cursor for this record 118 - let record_cursor = 119 - pagination.generate_cursor_from_record( 120 - record, 121 - pagination_params.sort_by, 122 - ) 123 - #(graphql_value, record_cursor) 124 - }) 125 - Ok(#( 126 - graphql_records_with_cursors, 127 - next_cursor, 128 - has_next_page, 129 - has_previous_page, 130 - total_count, 131 - )) 132 - } 133 - } 134 - } 135 - 136 - // Step 3.5: Create a batch fetcher function for join operations 137 - let batch_fetcher = fn( 138 - uris: List(String), 139 - collection: String, 140 - field: option.Option(String), 141 - ) -> Result(dataloader.BatchResult, String) { 142 - // Check if this is a forward join (field is None) or reverse join (field is Some) 143 - case field { 144 - option.None -> { 145 - // Determine if we're dealing with DIDs or URIs 146 - case uris { 147 - [] -> Ok(dict.new()) 148 - [first, ..] 
-> { 149 - case string.starts_with(first, "did:") { 150 - True -> { 151 - // DID join: fetch records by DID and collection 152 - case 153 - records.get_by_dids_and_collection(db, uris, collection) 154 - { 155 - Ok(record_list) -> { 156 - // Group records by DID 157 - let grouped = 158 - list.fold(record_list, dict.new(), fn(acc, record) { 159 - let graphql_value = 160 - record_to_graphql_value(record, db) 161 - let existing = 162 - dict.get(acc, record.did) |> result.unwrap([]) 163 - dict.insert(acc, record.did, [ 164 - graphql_value, 165 - ..existing 166 - ]) 167 - }) 168 - Ok(grouped) 169 - } 170 - Error(_) -> Error("Failed to fetch records by DIDs") 171 - } 172 - } 173 - False -> { 174 - // Forward join: fetch records by their URIs 175 - case records.get_by_uris(db, uris) { 176 - Ok(record_list) -> { 177 - // Group records by URI 178 - let grouped = 179 - list.fold(record_list, dict.new(), fn(acc, record) { 180 - let graphql_value = 181 - record_to_graphql_value(record, db) 182 - // For forward joins, return single record per URI 183 - dict.insert(acc, record.uri, [graphql_value]) 184 - }) 185 - Ok(grouped) 186 - } 187 - Error(_) -> Error("Failed to fetch records by URIs") 188 - } 189 - } 190 - } 191 - } 192 - } 193 - } 194 - option.Some(reference_field) -> { 195 - // Reverse join: fetch records that reference the parent URIs 196 - case 197 - records.get_by_reference_field( 198 - db, 199 - collection, 200 - reference_field, 201 - uris, 202 - ) 203 - { 204 - Ok(record_list) -> { 205 - // Group records by the parent URI they reference 206 - // Parse each record's JSON to extract the reference field value 207 - let grouped = 208 - list.fold(record_list, dict.new(), fn(acc, record) { 209 - let graphql_value = record_to_graphql_value(record, db) 210 - // Extract the reference field from the record JSON to find parent URI 211 - case extract_reference_uri(record.json, reference_field) { 212 - Ok(parent_uri) -> { 213 - let existing = 214 - dict.get(acc, parent_uri) |> 
result.unwrap([]) 215 - dict.insert(acc, parent_uri, [graphql_value, ..existing]) 216 - } 217 - Error(_) -> acc 218 - } 219 - }) 220 - Ok(grouped) 221 - } 222 - Error(_) -> 223 - Error( 224 - "Failed to fetch records by reference field: " 225 - <> reference_field, 226 - ) 227 - } 228 - } 229 - } 230 - } 231 - 232 - // Step 3.6: Create a paginated batch fetcher function for join operations with pagination 233 - let paginated_batch_fetcher = fn( 234 - key: String, 235 - collection: String, 236 - field: option.Option(String), 237 - pagination_params: dataloader.PaginationParams, 238 - ) -> Result(dataloader.PaginatedBatchResult, String) { 239 - // Convert pagination params to database pagination params 240 - let db_first = pagination_params.first 241 - let db_after = pagination_params.after 242 - let db_last = pagination_params.last 243 - let db_before = pagination_params.before 244 - let db_sort_by = pagination_params.sort_by 245 - 246 - // Convert where clause from GraphQL to database format 247 - let db_where = case pagination_params.where { 248 - option.Some(where_clause) -> 249 - option.Some(where_converter.convert_where_clause(where_clause)) 250 - option.None -> option.None 251 - } 252 - 253 - // Check if this is a DID join (field is None) or reverse join (field is Some) 254 - case field { 255 - option.None -> { 256 - // DID join: key is the DID 257 - case 258 - records.get_by_dids_and_collection_paginated( 259 - db, 260 - key, 261 - collection, 262 - db_first, 263 - db_after, 264 - db_last, 265 - db_before, 266 - db_sort_by, 267 - db_where, 268 - ) 269 - { 270 - Ok(#( 271 - record_list, 272 - _next_cursor, 273 - has_next_page, 274 - has_previous_page, 275 - total_count, 276 - )) -> { 277 - // Convert records to GraphQL values with cursors 278 - let edges = 279 - list.map(record_list, fn(record) { 280 - let graphql_value = record_to_graphql_value(record, db) 281 - let cursor = 282 - pagination.generate_cursor_from_record(record, db_sort_by) 283 - 
#(graphql_value, cursor) 284 - }) 285 - 286 - Ok(dataloader.PaginatedBatchResult( 287 - edges: edges, 288 - has_next_page: has_next_page, 289 - has_previous_page: has_previous_page, 290 - total_count: total_count, 291 - )) 292 - } 293 - Error(_) -> Error("Failed to fetch paginated records by DID") 294 - } 295 - } 296 - option.Some(reference_field) -> { 297 - // Reverse join: key is the parent URI 298 - case 299 - records.get_by_reference_field_paginated( 300 - db, 301 - collection, 302 - reference_field, 303 - key, 304 - db_first, 305 - db_after, 306 - db_last, 307 - db_before, 308 - db_sort_by, 309 - db_where, 310 - ) 311 - { 312 - Ok(#( 313 - record_list, 314 - _next_cursor, 315 - has_next_page, 316 - has_previous_page, 317 - total_count, 318 - )) -> { 319 - // Convert records to GraphQL values with cursors 320 - let edges = 321 - list.map(record_list, fn(record) { 322 - let graphql_value = record_to_graphql_value(record, db) 323 - let cursor = 324 - pagination.generate_cursor_from_record(record, db_sort_by) 325 - #(graphql_value, cursor) 326 - }) 327 - 328 - Ok(dataloader.PaginatedBatchResult( 329 - edges: edges, 330 - has_next_page: has_next_page, 331 - has_previous_page: has_previous_page, 332 - total_count: total_count, 333 - )) 334 - } 335 - Error(_) -> 336 - Error( 337 - "Failed to fetch paginated records by reference field: " 338 - <> reference_field, 339 - ) 340 - } 341 - } 342 - } 343 - } 344 - 345 - // Step 4: Determine local and external collections for backfill 346 - let collection_ids = 347 - parsed_lexicons 348 - |> list.filter_map(fn(lex) { 349 - case 350 - backfill.nsid_matches_domain_authority(lex.id, domain_authority) 351 - { 352 - True -> Ok(lex.id) 353 - // Local collection, include 354 - False -> Error(Nil) 355 - // External collection, skip 356 - } 357 - }) 358 - 359 - let external_collection_ids = 360 - parsed_lexicons 361 - |> list.filter_map(fn(lex) { 362 - case 363 - backfill.nsid_matches_domain_authority(lex.id, domain_authority) 364 - 
{ 365 - True -> Error(Nil) 366 - // Local collection, skip 367 - False -> Ok(lex.id) 368 - // External collection, include 369 - } 370 - }) 371 - 372 - // Step 5: Create mutation resolver factories 373 - let mutation_ctx = 374 - mutation_resolvers.MutationContext( 375 - db: db, 376 - did_cache: did_cache, 377 - signing_key: signing_key, 378 - atp_client_id: atp_client_id, 379 - plc_url: plc_url, 380 - collection_ids: collection_ids, 381 - external_collection_ids: external_collection_ids, 382 - ) 383 - 384 - let create_factory = 385 - option.Some(fn(collection) { 386 - mutation_resolvers.create_resolver_factory(collection, mutation_ctx) 387 - }) 388 - 389 - let update_factory = 390 - option.Some(fn(collection) { 391 - mutation_resolvers.update_resolver_factory(collection, mutation_ctx) 392 - }) 393 - 394 - let delete_factory = 395 - option.Some(fn(collection) { 396 - mutation_resolvers.delete_resolver_factory(collection, mutation_ctx) 397 - }) 398 - 399 - let upload_blob_factory = 400 - option.Some(fn() { 401 - mutation_resolvers.upload_blob_resolver_factory(mutation_ctx) 402 - }) 403 - 404 - // Step 5.5: Create an aggregate fetcher function 405 - let aggregate_fetcher = fn( 406 - collection_nsid: String, 407 - params: database.AggregateParams, 408 - ) { 409 - // Convert GraphQL where clause to SQL where clause 410 - let where_clause = case params.where { 411 - option.Some(graphql_where) -> 412 - option.Some(where_converter.convert_where_clause(graphql_where)) 413 - option.None -> option.None 414 - } 415 - 416 - // Convert GroupByFieldInput to types.GroupByField 417 - let group_by_fields = 418 - list.map(params.group_by, fn(gb) { 419 - case gb.interval { 420 - option.Some(interval) -> { 421 - let db_interval = case interval { 422 - aggregate.Hour -> types.Hour 423 - aggregate.Day -> types.Day 424 - aggregate.Week -> types.Week 425 - aggregate.Month -> types.Month 426 - } 427 - types.TruncatedField(gb.field, db_interval) 428 - } 429 - option.None -> 
types.SimpleField(gb.field) 430 - } 431 - }) 432 - 433 - // Call database aggregation function 434 - aggregates.get_aggregated_records( 435 - db, 436 - collection_nsid, 437 - group_by_fields, 438 - where_clause, 439 - params.order_by_desc, 440 - params.limit, 441 - ) 442 - |> result.map_error(fn(_) { "Failed to fetch aggregated records" }) 443 - } 444 - 445 - // Step 5.6: Create a viewer fetcher function for authenticated user info 446 - let viewer_fetcher = fn(token: String) { 447 - case atproto_auth.verify_token(db, token) { 448 - Error(_) -> Error("Invalid or expired token") 449 - Ok(user_info) -> { 450 - // Get handle from actors table 451 - let handle = case actors.get(db, user_info.did) { 452 - Ok([actor, ..]) -> option.Some(actor.handle) 453 - _ -> option.None 454 - } 455 - Ok(#(user_info.did, handle)) 456 - } 457 - } 458 - } 459 - 460 - // Step 6: Build schema with database-backed resolvers, mutations, and subscriptions 461 - database.build_schema_with_subscriptions( 462 - parsed_lexicons, 463 - record_fetcher, 464 - option.Some(batch_fetcher), 465 - option.Some(paginated_batch_fetcher), 466 - create_factory, 467 - update_factory, 468 - delete_factory, 469 - upload_blob_factory, 470 - option.Some(aggregate_fetcher), 471 - option.Some(viewer_fetcher), 472 - ) 473 - } 474 - } 475 - } 476 - 477 - /// Execute a GraphQL query against lexicons in the database 478 - /// 479 - /// This fetches lexicons, builds a schema with database resolvers, 480 - /// executes the query, and returns the result as JSON. 
481 - pub fn execute_query_with_db( 482 - db: sqlight.Connection, 483 - query_string: String, 484 - variables_json_str: String, 485 - auth_token: Result(String, Nil), 486 - did_cache: Subject(did_cache.Message), 487 - signing_key: option.Option(String), 488 - atp_client_id: String, 489 - plc_url: String, 490 - ) -> Result(String, String) { 491 - // Get domain authority from database 492 - let domain_authority = case config_repo.get(db, "domain_authority") { 493 - Ok(authority) -> authority 494 - Error(_) -> "" 495 - } 496 - 497 - // Build the schema 498 - use graphql_schema <- result.try(build_schema_from_db( 499 - db, 500 - did_cache, 501 - signing_key, 502 - atp_client_id, 503 - plc_url, 504 - domain_authority, 505 - )) 506 - 507 - // Create context with auth token if provided 508 - let ctx_data = case auth_token { 509 - Ok(token) -> { 510 - // Add auth token to context for mutation resolvers 511 - option.Some(value.Object([#("auth_token", value.String(token))])) 512 - } 513 - Error(_) -> option.None 514 - } 515 - 516 - // Convert json variables to Dict(String, value.Value) 517 - let variables_dict = json_string_to_variables_dict(variables_json_str) 518 - 519 - let ctx = schema.context_with_variables(ctx_data, variables_dict) 520 - 521 - // Execute the query 522 - use response <- result.try(executor.execute(query_string, graphql_schema, ctx)) 523 - 524 - // Format the response as JSON 525 - Ok(format_response(response)) 526 - } 527 - 528 - /// Convert a database Record to a GraphQL value.Value 529 - /// 530 - /// Creates an Object with all the record metadata plus the parsed JSON value 531 - pub fn record_to_graphql_value( 532 - record: types.Record, 533 - db: sqlight.Connection, 534 - ) -> value.Value { 535 - // Parse the record JSON and convert to GraphQL value 536 - let value_object = case parse_json_to_value(record.json) { 537 - Ok(val) -> val 538 - Error(_) -> value.Object([]) 539 - // Fallback to empty object on parse error 540 - } 541 - 542 - // Look up 
actor handle from actor table 543 - let actor_handle = case actors.get(db, record.did) { 544 - Ok([actor, ..]) -> value.String(actor.handle) 545 - _ -> value.Null 546 - } 547 - 548 - // Create the full record object with metadata and value 549 - value.Object([ 550 - #("uri", value.String(record.uri)), 551 - #("cid", value.String(record.cid)), 552 - #("did", value.String(record.did)), 553 - #("collection", value.String(record.collection)), 554 - #("indexedAt", value.String(record.indexed_at)), 555 - #("actorHandle", actor_handle), 556 - #("value", value_object), 557 - ]) 558 - } 559 - 560 - /// Parse a JSON string and convert it to a GraphQL value.Value 561 - pub fn parse_json_to_value(json_str: String) -> Result(value.Value, String) { 562 - // Parse JSON string to dynamic value 563 - case json.parse(json_str, decode.dynamic) { 564 - Ok(dyn) -> Ok(dynamic_to_value(dyn)) 565 - Error(_) -> Error("Failed to parse JSON") 566 - } 567 - } 568 - 569 - /// Convert a dynamic value to a GraphQL value.Value 570 - fn dynamic_to_value(dyn: dynamic.Dynamic) -> value.Value { 571 - // Try different decoders in order 572 - case decode.run(dyn, decode.string) { 573 - Ok(s) -> value.String(s) 574 - Error(_) -> 575 - case decode.run(dyn, decode.int) { 576 - Ok(i) -> value.Int(i) 577 - Error(_) -> 578 - case decode.run(dyn, decode.float) { 579 - Ok(f) -> value.Float(f) 580 - Error(_) -> 581 - case decode.run(dyn, decode.bool) { 582 - Ok(b) -> value.Boolean(b) 583 - Error(_) -> 584 - case decode.run(dyn, decode.list(decode.dynamic)) { 585 - Ok(items) -> { 586 - let converted_items = list.map(items, dynamic_to_value) 587 - value.List(converted_items) 588 - } 589 - Error(_) -> 590 - case 591 - decode.run( 592 - dyn, 593 - decode.dict(decode.string, decode.dynamic), 594 - ) 595 - { 596 - Ok(dict) -> { 597 - let fields = 598 - dict 599 - |> dict.to_list 600 - |> list.map(fn(entry) { 601 - let #(key, val) = entry 602 - #(key, dynamic_to_value(val)) 603 - }) 604 - value.Object(fields) 605 - } 
606 - Error(_) -> value.Null 607 - } 608 - } 609 - } 610 - } 611 - } 612 - } 613 - } 614 - 615 - /// Format an executor.Response as JSON string 616 - /// Per GraphQL spec, only include "errors" field when there are actual errors 617 - pub fn format_response(response: executor.Response) -> String { 618 - let data_json = value_to_json(response.data) 619 - 620 - case response.errors { 621 - [] -> "{\"data\": " <> data_json <> "}" 622 - errors -> { 623 - let error_strings = 624 - list.map(errors, fn(err) { 625 - let message_json = json.string(err.message) |> json.to_string 626 - let path_json = 627 - json.array(err.path, of: json.string) |> json.to_string 628 - 629 - "{\"message\": " <> message_json <> ", \"path\": " <> path_json <> "}" 630 - }) 631 - 632 - let errors_json = "[" <> string.join(error_strings, ",") <> "]" 633 - "{\"data\": " <> data_json <> ", \"errors\": " <> errors_json <> "}" 634 - } 635 - } 636 - } 637 - 638 - /// Convert a GraphQL value to JSON string 639 - fn value_to_json(val: value.Value) -> String { 640 - case val { 641 - value.Null -> "null" 642 - value.Int(i) -> json.int(i) |> json.to_string 643 - value.Float(f) -> json.float(f) |> json.to_string 644 - value.String(s) -> json.string(s) |> json.to_string 645 - value.Boolean(b) -> json.bool(b) |> json.to_string 646 - value.Enum(e) -> json.string(e) |> json.to_string 647 - value.List(items) -> { 648 - let item_jsons = list.map(items, value_to_json) 649 - "[" <> string.join(item_jsons, ",") <> "]" 650 - } 651 - value.Object(fields) -> { 652 - let field_jsons = 653 - list.map(fields, fn(field) { 654 - let #(key, value) = field 655 - let key_json = json.string(key) |> json.to_string 656 - let value_json = value_to_json(value) 657 - key_json <> ": " <> value_json 658 - }) 659 - "{" <> string.join(field_jsons, ",") <> "}" 660 - } 661 - } 662 - } 663 - 664 - /// Convert JSON string variables to Dict(String, value.Value) 665 - /// Exported for use by subscription handlers 666 - pub fn 
json_string_to_variables_dict( 667 - json_string: String, 668 - ) -> dict.Dict(String, value.Value) { 669 - // First try to extract the "variables" field from the JSON 670 - let variables_decoder = { 671 - use vars <- decode.field("variables", decode.dynamic) 672 - decode.success(vars) 673 - } 674 - 675 - case json.parse(json_string, variables_decoder) { 676 - Ok(dyn) -> { 677 - // Convert dynamic to value.Value 678 - case json_dynamic_to_value(dyn) { 679 - value.Object(fields) -> dict.from_list(fields) 680 - _ -> dict.new() 681 - } 682 - } 683 - Error(_) -> dict.new() 684 - } 685 - } 686 - 687 - /// Convert a dynamic JSON value to graphql value.Value 688 - fn json_dynamic_to_value(dyn: dynamic.Dynamic) -> value.Value { 689 - // Try different decoders in order 690 - case decode.run(dyn, decode.string) { 691 - Ok(s) -> value.String(s) 692 - Error(_) -> 693 - case decode.run(dyn, decode.int) { 694 - Ok(i) -> value.Int(i) 695 - Error(_) -> 696 - case decode.run(dyn, decode.float) { 697 - Ok(f) -> value.Float(f) 698 - Error(_) -> 699 - case decode.run(dyn, decode.bool) { 700 - Ok(b) -> value.Boolean(b) 701 - Error(_) -> 702 - // Try as a list 703 - case decode.run(dyn, decode.list(decode.dynamic)) { 704 - Ok(items) -> 705 - value.List(list.map(items, json_dynamic_to_value)) 706 - Error(_) -> 707 - // Try as an object (dict) 708 - case 709 - decode.run( 710 - dyn, 711 - decode.dict(decode.string, decode.dynamic), 712 - ) 713 - { 714 - Ok(d) -> 715 - value.Object( 716 - list.map(dict.to_list(d), fn(pair) { 717 - #(pair.0, json_dynamic_to_value(pair.1)) 718 - }), 719 - ) 720 - Error(_) -> value.Null 721 - } 722 - } 723 - } 724 - } 725 - } 726 - } 727 - } 728 - 729 - /// Extract a reference URI from a record's JSON 730 - /// This handles both simple string fields (at-uri) and strongRef objects 731 - fn extract_reference_uri( 732 - json_str: String, 733 - field_name: String, 734 - ) -> Result(String, Nil) { 735 - // Parse the JSON 736 - case parse_json_to_value(json_str) { 
737 - Ok(value.Object(fields)) -> { 738 - // Find the field 739 - case list.key_find(fields, field_name) { 740 - Ok(value.String(uri)) -> Ok(uri) 741 - Ok(value.Object(ref_fields)) -> { 742 - // Handle strongRef: { "uri": "...", "cid": "..." } 743 - case list.key_find(ref_fields, "uri") { 744 - Ok(value.String(uri)) -> Ok(uri) 745 - _ -> Error(Nil) 746 - } 747 - } 748 - _ -> Error(Nil) 749 - } 750 - } 751 - _ -> Error(Nil) 752 - } 753 - }
+2 -2
server/src/handlers/graphql.gleam
··· 11 11 import gleam/option 12 12 import gleam/result 13 13 import gleam/string 14 - import graphql_gleam 14 + import graphql/lexicon/schema as lexicon_schema 15 15 import lib/oauth/did_cache 16 16 import sqlight 17 17 import wisp ··· 140 140 ) -> wisp.Response { 141 141 // Use the new pure Gleam GraphQL implementation 142 142 case 143 - graphql_gleam.execute_query_with_db( 143 + lexicon_schema.execute_query_with_db( 144 144 db, 145 145 query, 146 146 variables_json_str,
+5 -5
server/src/handlers/graphql_ws.gleam
··· 10 10 import gleam/option.{type Option, None, Some} 11 11 import gleam/result 12 12 import gleam/string 13 - import graphql_gleam 13 + import graphql/lexicon/schema as lexicon_schema 14 14 import graphql_ws 15 15 import lib/oauth/did_cache 16 16 import logging ··· 48 48 db: sqlight.Connection, 49 49 ) -> value.Value { 50 50 // Parse the record JSON value 51 - let value_object = case graphql_gleam.parse_json_to_value(event.value) { 51 + let value_object = case lexicon_schema.parse_json_to_value(event.value) { 52 52 Ok(val) -> val 53 53 Error(_) -> value.Object([]) 54 54 } ··· 91 91 use response <- result.try(executor.execute(query, graphql_schema, ctx)) 92 92 93 93 // Format the response as JSON 94 - Ok(graphql_gleam.format_response(response)) 94 + Ok(lexicon_schema.format_response(response)) 95 95 } 96 96 97 97 /// Convert collection name to GraphQL field name format ··· 195 195 196 196 // Build GraphQL schema for subscriptions 197 197 let graphql_schema = case 198 - graphql_gleam.build_schema_from_db( 198 + lexicon_schema.build_schema_from_db( 199 199 db, 200 200 did_cache, 201 201 signing_key, ··· 358 358 // Parse variables from JSON 359 359 let variables = case variables_opt { 360 360 Some(vars_json) -> 361 - graphql_gleam.json_string_to_variables_dict(vars_json) 361 + lexicon_schema.json_string_to_variables_dict(vars_json) 362 362 None -> dict.new() 363 363 } 364 364
+2 -2
server/src/lib/mcp/tools/graphql.gleam
··· 2 2 import gleam/json 3 3 import gleam/option.{type Option} 4 4 import gleam/result 5 - import graphql_gleam 5 + import graphql/lexicon/schema as lexicon_schema 6 6 import lib/oauth/did_cache 7 7 import sqlight 8 8 ··· 15 15 signing_key: Option(String), 16 16 plc_url: String, 17 17 ) -> Result(json.Json, String) { 18 - use result_str <- result.try(graphql_gleam.execute_query_with_db( 18 + use result_str <- result.try(lexicon_schema.execute_query_with_db( 19 19 db, 20 20 query, 21 21 variables_json,
-1022
server/src/mutation_resolvers.gleam
··· 1 - /// Mutation Resolvers 2 - /// 3 - /// Implements GraphQL mutation resolvers with AT Protocol integration. 4 - /// These resolvers handle authentication, validation, and database operations. 5 - import actor_validator 6 - import atproto_auth 7 - import backfill 8 - import database/repositories/lexicons 9 - import database/repositories/records 10 - import dpop 11 - import gleam/dict 12 - import gleam/dynamic 13 - import gleam/dynamic/decode 14 - import gleam/erlang/process.{type Subject} 15 - import gleam/int 16 - import gleam/json 17 - import gleam/list 18 - import gleam/option 19 - import gleam/result 20 - import honk 21 - import honk/errors 22 - import lib/oauth/did_cache 23 - import sqlight 24 - import swell/schema 25 - import swell/value 26 - 27 - /// Context for mutation execution 28 - pub type MutationContext { 29 - MutationContext( 30 - db: sqlight.Connection, 31 - did_cache: Subject(did_cache.Message), 32 - signing_key: option.Option(String), 33 - atp_client_id: String, 34 - plc_url: String, 35 - collection_ids: List(String), 36 - external_collection_ids: List(String), 37 - ) 38 - } 39 - 40 - /// Convert GraphQL value to JSON value (not string) 41 - fn graphql_value_to_json_value(val: value.Value) -> json.Json { 42 - case val { 43 - value.String(s) -> json.string(s) 44 - value.Int(i) -> json.int(i) 45 - value.Float(f) -> json.float(f) 46 - value.Boolean(b) -> json.bool(b) 47 - value.Null -> json.null() 48 - value.Enum(e) -> json.string(e) 49 - value.List(items) -> json.array(items, graphql_value_to_json_value) 50 - value.Object(fields) -> { 51 - json.object( 52 - fields 53 - |> list.map(fn(field) { 54 - let #(key, val) = field 55 - #(key, graphql_value_to_json_value(val)) 56 - }), 57 - ) 58 - } 59 - } 60 - } 61 - 62 - /// Get blob field paths from a lexicon for a given collection 63 - /// Returns a list of paths, where each path is a list of field names 64 - /// e.g., [["avatar"], ["banner"], ["nested", "image"]] 65 - fn get_blob_paths( 66 - 
collection: String, 67 - lexicons: List(json.Json), 68 - ) -> List(List(String)) { 69 - // Find the lexicon for this collection 70 - let lexicon = 71 - list.find(lexicons, fn(lex) { 72 - case json.parse(json.to_string(lex), decode.at(["id"], decode.string)) { 73 - Ok(id) -> id == collection 74 - Error(_) -> False 75 - } 76 - }) 77 - 78 - case lexicon { 79 - Ok(lex) -> { 80 - // Get defs.main.record.properties 81 - let properties_decoder = 82 - decode.at( 83 - ["defs", "main", "record", "properties"], 84 - decode.dict(decode.string, decode.dynamic), 85 - ) 86 - case json.parse(json.to_string(lex), properties_decoder) { 87 - Ok(properties) -> extract_blob_paths_from_properties(properties, []) 88 - Error(_) -> [] 89 - } 90 - } 91 - Error(_) -> [] 92 - } 93 - } 94 - 95 - /// Recursively extract blob paths from lexicon properties 96 - fn extract_blob_paths_from_properties( 97 - properties: dict.Dict(String, dynamic.Dynamic), 98 - current_path: List(String), 99 - ) -> List(List(String)) { 100 - dict.fold(properties, [], fn(acc, field_name, field_def) { 101 - let field_path = list.append(current_path, [field_name]) 102 - 103 - // Check field type 104 - let type_result = decode.run(field_def, decode.at(["type"], decode.string)) 105 - 106 - case type_result { 107 - Ok("blob") -> { 108 - // Found a blob field 109 - [field_path, ..acc] 110 - } 111 - Ok("object") -> { 112 - // Recurse into nested object properties 113 - let nested_props_result = 114 - decode.run( 115 - field_def, 116 - decode.at( 117 - ["properties"], 118 - decode.dict(decode.string, decode.dynamic), 119 - ), 120 - ) 121 - case nested_props_result { 122 - Ok(nested_props) -> { 123 - let nested_paths = 124 - extract_blob_paths_from_properties(nested_props, field_path) 125 - list.append(nested_paths, acc) 126 - } 127 - Error(_) -> acc 128 - } 129 - } 130 - Ok("array") -> { 131 - // Check if array items are blobs or objects containing blobs 132 - let items_type_result = 133 - decode.run(field_def, 
decode.at(["items", "type"], decode.string)) 134 - case items_type_result { 135 - Ok("blob") -> { 136 - // Array of blobs - the path points to the array 137 - [field_path, ..acc] 138 - } 139 - Ok("object") -> { 140 - // Array of objects - recurse into item properties 141 - let item_props_result = 142 - decode.run( 143 - field_def, 144 - decode.at( 145 - ["items", "properties"], 146 - decode.dict(decode.string, decode.dynamic), 147 - ), 148 - ) 149 - case item_props_result { 150 - Ok(item_props) -> { 151 - let nested_paths = 152 - extract_blob_paths_from_properties(item_props, field_path) 153 - list.append(nested_paths, acc) 154 - } 155 - Error(_) -> acc 156 - } 157 - } 158 - _ -> acc 159 - } 160 - } 161 - _ -> acc 162 - } 163 - }) 164 - } 165 - 166 - /// Transform blob inputs in a value from GraphQL format to AT Protocol format 167 - /// GraphQL: { ref: "bafyrei...", mimeType: "image/jpeg", size: 12345 } 168 - /// AT Proto: { "$type": "blob", ref: { "$link": "bafyrei..." }, mimeType: "image/jpeg", size: 12345 } 169 - fn transform_blob_inputs( 170 - input: value.Value, 171 - blob_paths: List(List(String)), 172 - ) -> value.Value { 173 - transform_value_at_paths(input, blob_paths, []) 174 - } 175 - 176 - /// Recursively transform values at blob paths 177 - fn transform_value_at_paths( 178 - val: value.Value, 179 - blob_paths: List(List(String)), 180 - current_path: List(String), 181 - ) -> value.Value { 182 - case val { 183 - value.Object(fields) -> { 184 - // Check if current path matches any blob path exactly 185 - let is_blob_path = 186 - list.any(blob_paths, fn(path) { 187 - path == current_path && current_path != [] 188 - }) 189 - 190 - case is_blob_path { 191 - True -> { 192 - // Transform this object from BlobInput to AT Protocol format 193 - transform_blob_object(fields) 194 - } 195 - False -> { 196 - // Recurse into object fields 197 - value.Object( 198 - list.map(fields, fn(field) { 199 - let #(key, field_val) = field 200 - let new_path = 
list.append(current_path, [key]) 201 - #(key, transform_value_at_paths(field_val, blob_paths, new_path)) 202 - }), 203 - ) 204 - } 205 - } 206 - } 207 - value.List(items) -> { 208 - // Check if current path is a blob array path 209 - let is_blob_array_path = 210 - list.any(blob_paths, fn(path) { 211 - path == current_path && current_path != [] 212 - }) 213 - 214 - case is_blob_array_path { 215 - True -> { 216 - // Transform each item as a blob 217 - value.List( 218 - list.map(items, fn(item) { 219 - case item { 220 - value.Object(item_fields) -> transform_blob_object(item_fields) 221 - _ -> item 222 - } 223 - }), 224 - ) 225 - } 226 - False -> { 227 - // Check if any blob path starts with current path (for arrays of objects) 228 - let paths_through_here = 229 - list.filter(blob_paths, fn(path) { 230 - list.length(path) > list.length(current_path) 231 - && list.take(path, list.length(current_path)) == current_path 232 - }) 233 - 234 - case list.is_empty(paths_through_here) { 235 - True -> val 236 - False -> { 237 - // Recurse into array items with the same path 238 - value.List( 239 - list.map(items, fn(item) { 240 - transform_value_at_paths(item, blob_paths, current_path) 241 - }), 242 - ) 243 - } 244 - } 245 - } 246 - } 247 - } 248 - _ -> val 249 - } 250 - } 251 - 252 - /// Transform a BlobInput object to AT Protocol blob format 253 - fn transform_blob_object(fields: List(#(String, value.Value))) -> value.Value { 254 - // Extract ref, mimeType, size from fields 255 - let ref = case list.key_find(fields, "ref") { 256 - Ok(value.String(r)) -> r 257 - _ -> "" 258 - } 259 - let mime_type = case list.key_find(fields, "mimeType") { 260 - Ok(value.String(m)) -> m 261 - _ -> "" 262 - } 263 - let size = case list.key_find(fields, "size") { 264 - Ok(value.Int(s)) -> s 265 - _ -> 0 266 - } 267 - 268 - // Only transform if it looks like a valid BlobInput 269 - case ref != "" && mime_type != "" { 270 - True -> 271 - value.Object([ 272 - #("$type", value.String("blob")), 273 - 
#("ref", value.Object([#("$link", value.String(ref))])), 274 - #("mimeType", value.String(mime_type)), 275 - #("size", value.Int(size)), 276 - ]) 277 - False -> 278 - // Not a valid BlobInput, return as-is 279 - value.Object(fields) 280 - } 281 - } 282 - 283 - /// Create a resolver factory for create mutations 284 - pub fn create_resolver_factory( 285 - collection: String, 286 - ctx: MutationContext, 287 - ) -> schema.Resolver { 288 - fn(resolver_ctx: schema.Context) -> Result(value.Value, String) { 289 - // Step 1: Extract auth token from context data 290 - let token = case resolver_ctx.data { 291 - option.Some(value.Object(fields)) -> { 292 - case list.key_find(fields, "auth_token") { 293 - Ok(value.String(t)) -> Ok(t) 294 - Ok(_) -> Error("auth_token must be a string") 295 - Error(_) -> 296 - Error( 297 - "Authentication required. Please provide Authorization header.", 298 - ) 299 - } 300 - } 301 - _ -> 302 - Error("Authentication required. Please provide Authorization header.") 303 - } 304 - 305 - use token <- result.try(token) 306 - 307 - // Step 2: Get input and rkey from arguments 308 - let input_result = case schema.get_argument(resolver_ctx, "input") { 309 - option.Some(val) -> Ok(val) 310 - option.None -> Error("Missing required argument: input") 311 - } 312 - 313 - use input <- result.try(input_result) 314 - 315 - let rkey = case schema.get_argument(resolver_ctx, "rkey") { 316 - option.Some(value.String(r)) -> option.Some(r) 317 - _ -> option.None 318 - } 319 - 320 - // Step 3: Verify OAuth token and get AT Protocol session 321 - use user_info <- result.try( 322 - atproto_auth.verify_token(ctx.db, token) 323 - |> result.map_error(fn(err) { 324 - case err { 325 - atproto_auth.UnauthorizedToken -> "Unauthorized" 326 - atproto_auth.TokenExpired -> "Token expired" 327 - atproto_auth.MissingAuthHeader -> "Missing authentication" 328 - atproto_auth.InvalidAuthHeader -> "Invalid authentication header" 329 - _ -> "Authentication error" 330 - } 331 - }), 332 - ) 
333 - 334 - // Step 4: Ensure actor exists in database 335 - use is_new_actor <- result.try(actor_validator.ensure_actor_exists( 336 - ctx.db, 337 - user_info.did, 338 - ctx.plc_url, 339 - )) 340 - 341 - // If new actor, spawn backfill for all collections 342 - case is_new_actor { 343 - True -> { 344 - process.spawn_unlinked(fn() { 345 - backfill.backfill_collections_for_actor( 346 - ctx.db, 347 - user_info.did, 348 - ctx.collection_ids, 349 - ctx.external_collection_ids, 350 - ctx.plc_url, 351 - ) 352 - }) 353 - Nil 354 - } 355 - False -> Nil 356 - } 357 - 358 - use session <- result.try( 359 - atproto_auth.get_atp_session( 360 - ctx.db, 361 - ctx.did_cache, 362 - token, 363 - ctx.signing_key, 364 - ctx.atp_client_id, 365 - ) 366 - |> result.map_error(fn(err) { 367 - case err { 368 - atproto_auth.SessionNotFound -> "Session not found" 369 - atproto_auth.SessionNotReady -> "Session not ready" 370 - atproto_auth.RefreshFailed(msg) -> "Token refresh failed: " <> msg 371 - atproto_auth.DIDResolutionFailed(msg) -> 372 - "DID resolution failed: " <> msg 373 - _ -> "Failed to get ATP session" 374 - } 375 - }), 376 - ) 377 - 378 - // Step 5: Fetch lexicons for validation and blob path extraction 379 - use all_lexicon_records <- result.try( 380 - lexicons.get_all(ctx.db) 381 - |> result.map_error(fn(_) { "Failed to fetch lexicons" }), 382 - ) 383 - 384 - // Parse all lexicon JSON strings 385 - use all_lex_jsons <- result.try( 386 - all_lexicon_records 387 - |> list.try_map(fn(lex) { 388 - honk.parse_json_string(lex.json) 389 - |> result.map_error(fn(e) { 390 - "Failed to parse lexicon JSON: " <> errors.to_string(e) 391 - }) 392 - }), 393 - ) 394 - 395 - // Step 6: Transform blob inputs from GraphQL format to AT Protocol format 396 - let blob_paths = get_blob_paths(collection, all_lex_jsons) 397 - let transformed_input = transform_blob_inputs(input, blob_paths) 398 - 399 - // Convert transformed input to JSON for validation and AT Protocol 400 - let record_json_value = 
graphql_value_to_json_value(transformed_input) 401 - let record_json_string = json.to_string(record_json_value) 402 - 403 - // Step 7: Validate against lexicon 404 - use _ <- result.try( 405 - honk.validate_record(all_lex_jsons, collection, record_json_value) 406 - |> result.map_error(fn(err) { 407 - "Validation failed: " <> errors.to_string(err) 408 - }), 409 - ) 410 - 411 - { 412 - // Step 8: Call createRecord via AT Protocol 413 - // Omit rkey field when not provided to let PDS auto-generate TID 414 - let create_body = 415 - case rkey { 416 - option.Some(r) -> 417 - json.object([ 418 - #("repo", json.string(user_info.did)), 419 - #("collection", json.string(collection)), 420 - #("rkey", json.string(r)), 421 - #("record", record_json_value), 422 - ]) 423 - option.None -> 424 - json.object([ 425 - #("repo", json.string(user_info.did)), 426 - #("collection", json.string(collection)), 427 - #("record", record_json_value), 428 - ]) 429 - } 430 - |> json.to_string 431 - 432 - let pds_url = 433 - session.pds_endpoint <> "/xrpc/com.atproto.repo.createRecord" 434 - 435 - use response <- result.try( 436 - dpop.make_dpop_request("POST", pds_url, session, create_body) 437 - |> result.map_error(fn(_) { "Failed to create record on PDS" }), 438 - ) 439 - 440 - // Step 8: Check HTTP status and parse response 441 - use #(uri, cid) <- result.try(case response.status { 442 - 200 | 201 -> { 443 - // Parse successful response 444 - let response_decoder = { 445 - use uri <- decode.field("uri", decode.string) 446 - use cid <- decode.field("cid", decode.string) 447 - decode.success(#(uri, cid)) 448 - } 449 - 450 - json.parse(response.body, response_decoder) 451 - |> result.map_error(fn(_) { 452 - "Failed to parse PDS success response. 
Body: " <> response.body 453 - }) 454 - } 455 - _ -> { 456 - // Return actual PDS error 457 - Error( 458 - "PDS request failed with status " 459 - <> int.to_string(response.status) 460 - <> ": " 461 - <> response.body, 462 - ) 463 - } 464 - }) 465 - 466 - // Step 9: Index the created record in the database 467 - use _ <- result.try( 468 - records.insert( 469 - ctx.db, 470 - uri, 471 - cid, 472 - user_info.did, 473 - collection, 474 - record_json_string, 475 - ) 476 - |> result.map_error(fn(_) { "Failed to index record in database" }), 477 - ) 478 - 479 - // Step 10: Return the created record as a GraphQL value 480 - // The field resolvers expect the record data under "value" key 481 - // (same structure as query results) 482 - Ok( 483 - value.Object([ 484 - #("uri", value.String(uri)), 485 - #("cid", value.String(cid)), 486 - #("did", value.String(user_info.did)), 487 - #("collection", value.String(collection)), 488 - #("indexedAt", value.String("")), 489 - #("value", input), 490 - ]), 491 - ) 492 - } 493 - } 494 - } 495 - 496 - /// Create a resolver factory for update mutations 497 - pub fn update_resolver_factory( 498 - collection: String, 499 - ctx: MutationContext, 500 - ) -> schema.Resolver { 501 - fn(resolver_ctx: schema.Context) -> Result(value.Value, String) { 502 - // Step 1: Extract auth token from context data 503 - let token = case resolver_ctx.data { 504 - option.Some(value.Object(fields)) -> { 505 - case list.key_find(fields, "auth_token") { 506 - Ok(value.String(t)) -> Ok(t) 507 - Ok(_) -> Error("auth_token must be a string") 508 - Error(_) -> 509 - Error( 510 - "Authentication required. Please provide Authorization header.", 511 - ) 512 - } 513 - } 514 - _ -> 515 - Error("Authentication required. 
Please provide Authorization header.") 516 - } 517 - 518 - use token <- result.try(token) 519 - 520 - // Step 2: Get rkey (required) and input from arguments 521 - let rkey_result = case schema.get_argument(resolver_ctx, "rkey") { 522 - option.Some(value.String(r)) -> Ok(r) 523 - option.Some(_) -> Error("rkey must be a string") 524 - option.None -> Error("Missing required argument: rkey") 525 - } 526 - 527 - use rkey <- result.try(rkey_result) 528 - 529 - let input_result = case schema.get_argument(resolver_ctx, "input") { 530 - option.Some(val) -> Ok(val) 531 - option.None -> Error("Missing required argument: input") 532 - } 533 - 534 - use input <- result.try(input_result) 535 - 536 - // Step 3: Verify OAuth token and get AT Protocol session 537 - use user_info <- result.try( 538 - atproto_auth.verify_token(ctx.db, token) 539 - |> result.map_error(fn(err) { 540 - case err { 541 - atproto_auth.UnauthorizedToken -> "Unauthorized" 542 - atproto_auth.TokenExpired -> "Token expired" 543 - atproto_auth.MissingAuthHeader -> "Missing authentication" 544 - atproto_auth.InvalidAuthHeader -> "Invalid authentication header" 545 - _ -> "Authentication error" 546 - } 547 - }), 548 - ) 549 - 550 - // Step 4: Ensure actor exists in database 551 - use is_new_actor <- result.try(actor_validator.ensure_actor_exists( 552 - ctx.db, 553 - user_info.did, 554 - ctx.plc_url, 555 - )) 556 - 557 - // If new actor, spawn backfill for all collections 558 - case is_new_actor { 559 - True -> { 560 - process.spawn_unlinked(fn() { 561 - backfill.backfill_collections_for_actor( 562 - ctx.db, 563 - user_info.did, 564 - ctx.collection_ids, 565 - ctx.external_collection_ids, 566 - ctx.plc_url, 567 - ) 568 - }) 569 - Nil 570 - } 571 - False -> Nil 572 - } 573 - 574 - use session <- result.try( 575 - atproto_auth.get_atp_session( 576 - ctx.db, 577 - ctx.did_cache, 578 - token, 579 - ctx.signing_key, 580 - ctx.atp_client_id, 581 - ) 582 - |> result.map_error(fn(err) { 583 - case err { 584 - 
atproto_auth.SessionNotFound -> "Session not found" 585 - atproto_auth.SessionNotReady -> "Session not ready" 586 - atproto_auth.RefreshFailed(msg) -> "Token refresh failed: " <> msg 587 - atproto_auth.DIDResolutionFailed(msg) -> 588 - "DID resolution failed: " <> msg 589 - _ -> "Failed to get ATP session" 590 - } 591 - }), 592 - ) 593 - 594 - // Step 5: Fetch lexicons for validation and blob path extraction 595 - use all_lexicon_records <- result.try( 596 - lexicons.get_all(ctx.db) 597 - |> result.map_error(fn(_) { "Failed to fetch lexicons" }), 598 - ) 599 - 600 - // Parse all lexicon JSON strings 601 - use all_lex_jsons <- result.try( 602 - all_lexicon_records 603 - |> list.try_map(fn(lex) { 604 - honk.parse_json_string(lex.json) 605 - |> result.map_error(fn(e) { 606 - "Failed to parse lexicon JSON: " <> errors.to_string(e) 607 - }) 608 - }), 609 - ) 610 - 611 - // Step 6: Transform blob inputs from GraphQL format to AT Protocol format 612 - let blob_paths = get_blob_paths(collection, all_lex_jsons) 613 - let transformed_input = transform_blob_inputs(input, blob_paths) 614 - 615 - // Convert transformed input to JSON for validation and AT Protocol 616 - let record_json_value = graphql_value_to_json_value(transformed_input) 617 - let record_json_string = json.to_string(record_json_value) 618 - 619 - // Step 7: Validate against lexicon 620 - use _ <- result.try( 621 - honk.validate_record(all_lex_jsons, collection, record_json_value) 622 - |> result.map_error(fn(err) { 623 - "Validation failed: " <> errors.to_string(err) 624 - }), 625 - ) 626 - 627 - { 628 - // Step 8: Call putRecord via AT Protocol 629 - let update_body = 630 - json.object([ 631 - #("repo", json.string(user_info.did)), 632 - #("collection", json.string(collection)), 633 - #("rkey", json.string(rkey)), 634 - #("record", record_json_value), 635 - ]) 636 - |> json.to_string 637 - 638 - let pds_url = session.pds_endpoint <> "/xrpc/com.atproto.repo.putRecord" 639 - 640 - use response <- result.try( 
641 - dpop.make_dpop_request("POST", pds_url, session, update_body) 642 - |> result.map_error(fn(_) { "Failed to update record on PDS" }), 643 - ) 644 - 645 - // Step 8: Check HTTP status and parse response 646 - use #(uri, cid) <- result.try(case response.status { 647 - 200 | 201 -> { 648 - // Parse successful response 649 - let response_decoder = { 650 - use uri <- decode.field("uri", decode.string) 651 - use cid <- decode.field("cid", decode.string) 652 - decode.success(#(uri, cid)) 653 - } 654 - 655 - json.parse(response.body, response_decoder) 656 - |> result.map_error(fn(_) { 657 - "Failed to parse PDS success response. Body: " <> response.body 658 - }) 659 - } 660 - _ -> { 661 - // Return actual PDS error 662 - Error( 663 - "PDS request failed with status " 664 - <> int.to_string(response.status) 665 - <> ": " 666 - <> response.body, 667 - ) 668 - } 669 - }) 670 - 671 - // Step 9: Update the record in the database 672 - use _ <- result.try( 673 - records.update(ctx.db, uri, cid, record_json_string) 674 - |> result.map_error(fn(_) { "Failed to update record in database" }), 675 - ) 676 - 677 - // Step 10: Return the updated record as a GraphQL value 678 - // The field resolvers expect the record data under "value" key 679 - Ok( 680 - value.Object([ 681 - #("uri", value.String(uri)), 682 - #("cid", value.String(cid)), 683 - #("did", value.String(user_info.did)), 684 - #("collection", value.String(collection)), 685 - #("indexedAt", value.String("")), 686 - #("value", input), 687 - ]), 688 - ) 689 - } 690 - } 691 - } 692 - 693 - /// Create a resolver factory for delete mutations 694 - pub fn delete_resolver_factory( 695 - collection: String, 696 - ctx: MutationContext, 697 - ) -> schema.Resolver { 698 - fn(resolver_ctx: schema.Context) -> Result(value.Value, String) { 699 - // Step 1: Extract auth token from context data 700 - let token = case resolver_ctx.data { 701 - option.Some(value.Object(fields)) -> { 702 - case list.key_find(fields, "auth_token") { 703 - 
Ok(value.String(t)) -> Ok(t) 704 - Ok(_) -> Error("auth_token must be a string") 705 - Error(_) -> 706 - Error( 707 - "Authentication required. Please provide Authorization header.", 708 - ) 709 - } 710 - } 711 - _ -> 712 - Error("Authentication required. Please provide Authorization header.") 713 - } 714 - 715 - use token <- result.try(token) 716 - 717 - // Step 2: Get rkey (required) from arguments 718 - let rkey_result = case schema.get_argument(resolver_ctx, "rkey") { 719 - option.Some(value.String(r)) -> Ok(r) 720 - option.Some(_) -> Error("rkey must be a string") 721 - option.None -> Error("Missing required argument: rkey") 722 - } 723 - 724 - use rkey <- result.try(rkey_result) 725 - 726 - // Step 3: Verify OAuth token and get AT Protocol session 727 - use user_info <- result.try( 728 - atproto_auth.verify_token(ctx.db, token) 729 - |> result.map_error(fn(err) { 730 - case err { 731 - atproto_auth.UnauthorizedToken -> "Unauthorized" 732 - atproto_auth.TokenExpired -> "Token expired" 733 - atproto_auth.MissingAuthHeader -> "Missing authentication" 734 - atproto_auth.InvalidAuthHeader -> "Invalid authentication header" 735 - _ -> "Authentication error" 736 - } 737 - }), 738 - ) 739 - 740 - // Step 4: Ensure actor exists in database 741 - use is_new_actor <- result.try(actor_validator.ensure_actor_exists( 742 - ctx.db, 743 - user_info.did, 744 - ctx.plc_url, 745 - )) 746 - 747 - // If new actor, spawn backfill for all collections 748 - case is_new_actor { 749 - True -> { 750 - process.spawn_unlinked(fn() { 751 - backfill.backfill_collections_for_actor( 752 - ctx.db, 753 - user_info.did, 754 - ctx.collection_ids, 755 - ctx.external_collection_ids, 756 - ctx.plc_url, 757 - ) 758 - }) 759 - Nil 760 - } 761 - False -> Nil 762 - } 763 - 764 - use session <- result.try( 765 - atproto_auth.get_atp_session( 766 - ctx.db, 767 - ctx.did_cache, 768 - token, 769 - ctx.signing_key, 770 - ctx.atp_client_id, 771 - ) 772 - |> result.map_error(fn(err) { 773 - case err { 774 - 
atproto_auth.SessionNotFound -> "Session not found" 775 - atproto_auth.SessionNotReady -> "Session not ready" 776 - atproto_auth.RefreshFailed(msg) -> "Token refresh failed: " <> msg 777 - atproto_auth.DIDResolutionFailed(msg) -> 778 - "DID resolution failed: " <> msg 779 - _ -> "Failed to get ATP session" 780 - } 781 - }), 782 - ) 783 - 784 - // Step 5: Build the record URI to be deleted 785 - let uri = "at://" <> user_info.did <> "/" <> collection <> "/" <> rkey 786 - 787 - // Step 6: Call deleteRecord via AT Protocol 788 - let delete_body = 789 - json.object([ 790 - #("repo", json.string(user_info.did)), 791 - #("collection", json.string(collection)), 792 - #("rkey", json.string(rkey)), 793 - ]) 794 - |> json.to_string 795 - 796 - let pds_url = session.pds_endpoint <> "/xrpc/com.atproto.repo.deleteRecord" 797 - 798 - use response <- result.try( 799 - dpop.make_dpop_request("POST", pds_url, session, delete_body) 800 - |> result.map_error(fn(_) { "Failed to delete record on PDS" }), 801 - ) 802 - 803 - // Step 7: Check HTTP status 804 - use _ <- result.try(case response.status { 805 - 200 | 201 | 204 -> Ok(Nil) 806 - _ -> { 807 - // Return actual PDS error 808 - Error( 809 - "PDS delete request failed with status " 810 - <> int.to_string(response.status) 811 - <> ": " 812 - <> response.body, 813 - ) 814 - } 815 - }) 816 - 817 - // Step 8: Delete the record from the database 818 - use _ <- result.try( 819 - records.delete(ctx.db, uri) 820 - |> result.map_error(fn(_) { "Failed to delete record from database" }), 821 - ) 822 - 823 - // Step 9: Return the URI of the deleted record 824 - Ok(value.Object([#("uri", value.String(uri))])) 825 - } 826 - } 827 - 828 - /// Create a resolver for uploadBlob mutation 829 - pub fn upload_blob_resolver_factory(ctx: MutationContext) -> schema.Resolver { 830 - fn(resolver_ctx: schema.Context) -> Result(value.Value, String) { 831 - // Step 1: Extract auth token from context data 832 - let token = case resolver_ctx.data { 833 - 
option.Some(value.Object(fields)) -> { 834 - case list.key_find(fields, "auth_token") { 835 - Ok(value.String(t)) -> Ok(t) 836 - Ok(_) -> Error("auth_token must be a string") 837 - Error(_) -> 838 - Error( 839 - "Authentication required. Please provide Authorization header.", 840 - ) 841 - } 842 - } 843 - _ -> 844 - Error("Authentication required. Please provide Authorization header.") 845 - } 846 - 847 - use token <- result.try(token) 848 - 849 - // Step 2: Get data and mimeType from arguments 850 - let data_result = case schema.get_argument(resolver_ctx, "data") { 851 - option.Some(value.String(d)) -> Ok(d) 852 - option.Some(_) -> Error("data must be a string") 853 - option.None -> Error("Missing required argument: data") 854 - } 855 - 856 - use data_base64 <- result.try(data_result) 857 - 858 - let mime_type_result = case schema.get_argument(resolver_ctx, "mimeType") { 859 - option.Some(value.String(m)) -> Ok(m) 860 - option.Some(_) -> Error("mimeType must be a string") 861 - option.None -> Error("Missing required argument: mimeType") 862 - } 863 - 864 - use mime_type <- result.try(mime_type_result) 865 - 866 - // Step 3: Verify OAuth token and get AT Protocol session 867 - use user_info <- result.try( 868 - atproto_auth.verify_token(ctx.db, token) 869 - |> result.map_error(fn(err) { 870 - case err { 871 - atproto_auth.UnauthorizedToken -> "Unauthorized" 872 - atproto_auth.TokenExpired -> "Token expired" 873 - atproto_auth.MissingAuthHeader -> "Missing authentication" 874 - atproto_auth.InvalidAuthHeader -> "Invalid authentication header" 875 - _ -> "Authentication error" 876 - } 877 - }), 878 - ) 879 - 880 - // Step 4: Ensure actor exists in database 881 - use is_new_actor <- result.try(actor_validator.ensure_actor_exists( 882 - ctx.db, 883 - user_info.did, 884 - ctx.plc_url, 885 - )) 886 - 887 - // If new actor, spawn backfill for all collections 888 - case is_new_actor { 889 - True -> { 890 - process.spawn_unlinked(fn() { 891 - 
backfill.backfill_collections_for_actor( 892 - ctx.db, 893 - user_info.did, 894 - ctx.collection_ids, 895 - ctx.external_collection_ids, 896 - ctx.plc_url, 897 - ) 898 - }) 899 - Nil 900 - } 901 - False -> Nil 902 - } 903 - 904 - use session <- result.try( 905 - atproto_auth.get_atp_session( 906 - ctx.db, 907 - ctx.did_cache, 908 - token, 909 - ctx.signing_key, 910 - ctx.atp_client_id, 911 - ) 912 - |> result.map_error(fn(err) { 913 - case err { 914 - atproto_auth.SessionNotFound -> "Session not found" 915 - atproto_auth.SessionNotReady -> "Session not ready" 916 - atproto_auth.RefreshFailed(msg) -> "Token refresh failed: " <> msg 917 - atproto_auth.DIDResolutionFailed(msg) -> 918 - "DID resolution failed: " <> msg 919 - _ -> "Failed to get ATP session" 920 - } 921 - }), 922 - ) 923 - 924 - // Step 5: Decode base64 data to binary 925 - use binary_data <- result.try( 926 - decode_base64(data_base64) 927 - |> result.map_error(fn(_) { "Failed to decode base64 data" }), 928 - ) 929 - 930 - // Step 6: Upload blob to PDS 931 - let pds_url = session.pds_endpoint <> "/xrpc/com.atproto.repo.uploadBlob" 932 - 933 - use response <- result.try( 934 - dpop.make_dpop_request_with_binary( 935 - "POST", 936 - pds_url, 937 - session, 938 - binary_data, 939 - mime_type, 940 - ) 941 - |> result.map_error(fn(_) { "Failed to upload blob to PDS" }), 942 - ) 943 - 944 - // Step 7: Check HTTP status and parse response 945 - use blob_ref <- result.try(case response.status { 946 - 200 | 201 -> { 947 - // Parse PDS response: { blob: { $type: "blob", ref: { $link: "..." }, mimeType: "...", size: 123 } } 948 - let response_decoder = { 949 - use blob <- decode.field("blob", decode.dynamic) 950 - decode.success(blob) 951 - } 952 - 953 - case json.parse(response.body, response_decoder) { 954 - Ok(blob_dynamic) -> { 955 - // Extract blob fields from the response 956 - extract_blob_from_dynamic(blob_dynamic, user_info.did) 957 - } 958 - Error(_) -> { 959 - Error("Failed to parse PDS response. 
Body: " <> response.body) 960 - } 961 - } 962 - } 963 - _ -> { 964 - // Return actual PDS error 965 - Error( 966 - "PDS request failed with status " 967 - <> int.to_string(response.status) 968 - <> ": " 969 - <> response.body, 970 - ) 971 - } 972 - }) 973 - 974 - // Step 8: Return the BlobUploadResponse directly (flat structure) 975 - Ok(blob_ref) 976 - } 977 - } 978 - 979 - /// Decode base64 string to bit array 980 - fn decode_base64(base64_str: String) -> Result(BitArray, Nil) { 981 - // Erlang's base64:decode returns BitArray directly, not wrapped in Ok() 982 - // So we wrap it in Ok to match our function signature 983 - Ok(do_erlang_base64_decode(base64_str)) 984 - } 985 - 986 - /// Extract blob fields from dynamic PDS response 987 - fn extract_blob_from_dynamic( 988 - blob_dynamic: dynamic.Dynamic, 989 - did: String, 990 - ) -> Result(value.Value, String) { 991 - // Create a decoder for the nested ref.$link field 992 - let ref_link_decoder = { 993 - use link <- decode.field("$link", decode.string) 994 - decode.success(link) 995 - } 996 - 997 - // Decode all fields including nested ref.$link 998 - let full_decoder = { 999 - use mime_type <- decode.field("mimeType", decode.string) 1000 - use size <- decode.field("size", decode.int) 1001 - use ref <- decode.field("ref", ref_link_decoder) 1002 - decode.success(#(ref, mime_type, size)) 1003 - } 1004 - 1005 - use #(ref, mime_type, size) <- result.try( 1006 - decode.run(blob_dynamic, full_decoder) 1007 - |> result.map_error(fn(_) { "Failed to decode blob fields" }), 1008 - ) 1009 - 1010 - Ok( 1011 - value.Object([ 1012 - #("ref", value.String(ref)), 1013 - #("mime_type", value.String(mime_type)), 1014 - #("size", value.Int(size)), 1015 - #("did", value.String(did)), 1016 - ]), 1017 - ) 1018 - } 1019 - 1020 - /// Erlang FFI: base64:decode/1 returns BitArray directly (not Result) 1021 - @external(erlang, "base64", "decode") 1022 - fn do_erlang_base64_decode(a: String) -> BitArray
+11 -11
server/test/join_integration_test.gleam
··· 12 12 import gleam/option 13 13 import gleam/string 14 14 import gleeunit/should 15 - import graphql_gleam 15 + import graphql/lexicon/schema as lexicon_schema 16 16 import lib/oauth/did_cache 17 17 import sqlight 18 18 ··· 226 226 227 227 let assert Ok(cache) = did_cache.start() 228 228 let assert Ok(response_json) = 229 - graphql_gleam.execute_query_with_db( 229 + lexicon_schema.execute_query_with_db( 230 230 db, 231 231 query, 232 232 "{}", ··· 324 324 325 325 let assert Ok(cache) = did_cache.start() 326 326 let assert Ok(response_json) = 327 - graphql_gleam.execute_query_with_db( 327 + lexicon_schema.execute_query_with_db( 328 328 db, 329 329 query, 330 330 "{}", ··· 439 439 440 440 let assert Ok(cache) = did_cache.start() 441 441 let assert Ok(response_json) = 442 - graphql_gleam.execute_query_with_db( 442 + lexicon_schema.execute_query_with_db( 443 443 db, 444 444 query, 445 445 "{}", ··· 565 565 566 566 let assert Ok(cache) = did_cache.start() 567 567 let assert Ok(response_json) = 568 - graphql_gleam.execute_query_with_db( 568 + lexicon_schema.execute_query_with_db( 569 569 db, 570 570 query, 571 571 "{}", ··· 695 695 696 696 let assert Ok(cache) = did_cache.start() 697 697 let assert Ok(response_json) = 698 - graphql_gleam.execute_query_with_db( 698 + lexicon_schema.execute_query_with_db( 699 699 db, 700 700 query, 701 701 "{}", ··· 835 835 836 836 let assert Ok(cache) = did_cache.start() 837 837 let assert Ok(response_json) = 838 - graphql_gleam.execute_query_with_db( 838 + lexicon_schema.execute_query_with_db( 839 839 db, 840 840 query, 841 841 "{}", ··· 985 985 986 986 let assert Ok(cache) = did_cache.start() 987 987 let assert Ok(response_json) = 988 - graphql_gleam.execute_query_with_db( 988 + lexicon_schema.execute_query_with_db( 989 989 db, 990 990 query, 991 991 "{}", ··· 1103 1103 1104 1104 let assert Ok(cache) = did_cache.start() 1105 1105 let assert Ok(response_json) = 1106 - graphql_gleam.execute_query_with_db( 1106 + 
lexicon_schema.execute_query_with_db( 1107 1107 db, 1108 1108 query, 1109 1109 "{}", ··· 1239 1239 1240 1240 let assert Ok(cache) = did_cache.start() 1241 1241 let assert Ok(response_json) = 1242 - graphql_gleam.execute_query_with_db( 1242 + lexicon_schema.execute_query_with_db( 1243 1243 db, 1244 1244 query, 1245 1245 "{}", ··· 1501 1501 1502 1502 let assert Ok(cache) = did_cache.start() 1503 1503 let assert Ok(response_json) = 1504 - graphql_gleam.execute_query_with_db( 1504 + lexicon_schema.execute_query_with_db( 1505 1505 db, 1506 1506 query, 1507 1507 "{}",
+6 -6
server/test/nested_join_sortby_where_test.gleam
··· 15 15 import gleam/option 16 16 import gleam/string 17 17 import gleeunit/should 18 - import graphql_gleam 18 + import graphql/lexicon/schema as lexicon_schema 19 19 import lib/oauth/did_cache 20 20 import sqlight 21 21 ··· 197 197 198 198 let assert Ok(cache) = did_cache.start() 199 199 let assert Ok(response_json) = 200 - graphql_gleam.execute_query_with_db( 200 + lexicon_schema.execute_query_with_db( 201 201 db, 202 202 query, 203 203 "{}", ··· 343 343 344 344 let assert Ok(cache) = did_cache.start() 345 345 let assert Ok(response_json) = 346 - graphql_gleam.execute_query_with_db( 346 + lexicon_schema.execute_query_with_db( 347 347 db, 348 348 query, 349 349 "{}", ··· 464 464 465 465 let assert Ok(cache) = did_cache.start() 466 466 let assert Ok(response_json) = 467 - graphql_gleam.execute_query_with_db( 467 + lexicon_schema.execute_query_with_db( 468 468 db, 469 469 query, 470 470 "{}", ··· 594 594 595 595 let assert Ok(cache) = did_cache.start() 596 596 let assert Ok(response_json) = 597 - graphql_gleam.execute_query_with_db( 597 + lexicon_schema.execute_query_with_db( 598 598 db, 599 599 query, 600 600 "{}", ··· 764 764 765 765 let assert Ok(cache) = did_cache.start() 766 766 let assert Ok(response_json) = 767 - graphql_gleam.execute_query_with_db( 767 + lexicon_schema.execute_query_with_db( 768 768 db, 769 769 query, 770 770 "{}",
+5 -5
server/test/paginated_join_test.gleam
··· 14 14 import gleam/option 15 15 import gleam/string 16 16 import gleeunit/should 17 - import graphql_gleam 17 + import graphql/lexicon/schema as lexicon_schema 18 18 import lib/oauth/did_cache 19 19 import sqlight 20 20 ··· 229 229 230 230 let assert Ok(cache) = did_cache.start() 231 231 let assert Ok(response_json) = 232 - graphql_gleam.execute_query_with_db( 232 + lexicon_schema.execute_query_with_db( 233 233 db, 234 234 query, 235 235 "{}", ··· 348 348 349 349 let assert Ok(cache) = did_cache.start() 350 350 let assert Ok(response_json) = 351 - graphql_gleam.execute_query_with_db( 351 + lexicon_schema.execute_query_with_db( 352 352 db, 353 353 query, 354 354 "{}", ··· 465 465 466 466 let assert Ok(cache) = did_cache.start() 467 467 let assert Ok(response_json) = 468 - graphql_gleam.execute_query_with_db( 468 + lexicon_schema.execute_query_with_db( 469 469 db, 470 470 query, 471 471 "{}", ··· 583 583 584 584 let assert Ok(cache) = did_cache.start() 585 585 let assert Ok(response_json) = 586 - graphql_gleam.execute_query_with_db( 586 + lexicon_schema.execute_query_with_db( 587 587 db, 588 588 query, 589 589 "{}",
+4 -4
server/test/reverse_join_field_resolution_test.gleam
··· 12 12 import gleam/option 13 13 import gleam/string 14 14 import gleeunit/should 15 - import graphql_gleam 15 + import graphql/lexicon/schema as lexicon_schema 16 16 import lib/oauth/did_cache 17 17 import sqlight 18 18 ··· 344 344 345 345 let assert Ok(cache) = did_cache.start() 346 346 let assert Ok(response_json) = 347 - graphql_gleam.execute_query_with_db( 347 + lexicon_schema.execute_query_with_db( 348 348 conn, 349 349 query, 350 350 "{}", ··· 426 426 427 427 let assert Ok(cache) = did_cache.start() 428 428 let assert Ok(response_json) = 429 - graphql_gleam.execute_query_with_db( 429 + lexicon_schema.execute_query_with_db( 430 430 conn, 431 431 query, 432 432 "{}", ··· 588 588 589 589 let assert Ok(cache) = did_cache.start() 590 590 let assert Ok(response_json) = 591 - graphql_gleam.execute_query_with_db( 591 + lexicon_schema.execute_query_with_db( 592 592 conn, 593 593 query, 594 594 "{}",