Auto-indexing service and GraphQL API for AT Protocol records

initial commit

+10744
+1
.gitignore
··· 1 + erl_crash.dump
+53
Makefile
.PHONY: help test build clean run

help:
	@echo "QuickSlice - Makefile Commands"
	@echo ""
	@echo "  make run    - Start server"
	@echo "  make test   - Run all tests"
	@echo "  make build  - Build all projects"
	@echo "  make clean  - Clean build artifacts"
	@echo ""

# Build all projects
build:
	@echo "Building graphql package..."
	@cd graphql && gleam build
	@echo ""
	@echo "Building lexicon_graphql package..."
	@cd lexicon_graphql && gleam build
	@echo ""
	@echo "Building jetstream package..."
	@cd jetstream && gleam build
	@echo ""
	@echo "Building server..."
	@cd server && gleam build
	@echo ""
	@echo "Build complete"

# Run all tests
# Note: covers every package that `build` compiles, including jetstream
# (previously omitted here even though it was built and cleaned).
test: build
	@echo "Running graphql tests..."
	@cd graphql && gleam test
	@echo ""
	@echo "Running lexicon_graphql tests..."
	@cd lexicon_graphql && gleam test
	@echo ""
	@echo "Running jetstream tests..."
	@cd jetstream && gleam test
	@echo ""
	@echo "Running server tests..."
	@cd server && gleam test
	@echo ""
	@echo "All tests passed"

# Run server
run: build
	@echo "Starting server..."
	@cd server && gleam run

# Clean build artifacts
clean:
	@echo "Cleaning build artifacts..."
	@cd graphql && gleam clean
	@cd lexicon_graphql && gleam clean
	@cd jetstream && gleam clean
	@cd server && gleam clean
	@echo "Clean complete"
+4
graphql/.gitignore
··· 1 + *.beam 2 + *.ez 3 + /build 4 + erl_crash.dump
+154
graphql/README.md
··· 1 + # GraphQL 2 + 3 + A GraphQL implementation in Gleam providing query parsing, execution, and introspection support. 4 + 5 + ## Features 6 + 7 + ### Core GraphQL Functionality 8 + - **Query Parsing**: GraphQL query language support including: 9 + - Field selection 10 + - Arguments 11 + - Aliases 12 + - Fragments (inline and named) 13 + 14 + - **Schema Definition**: Type-safe schema builder with: 15 + - Object types 16 + - Scalar types (String, Int, Float, Boolean, ID) 17 + - List types 18 + - Non-null types 19 + - Field resolvers with context-based data access 20 + 21 + - **Query Execution**: Execution engine with: 22 + - Recursive field resolution 23 + - Nested object support 24 + - List handling with proper field filtering 25 + - Fragment spreading and inline fragments 26 + - Error collection and reporting 27 + - Path tracking for error context 28 + 29 + - **Introspection**: GraphQL introspection support 30 + - Schema introspection queries 31 + - Type introspection 32 + - Field introspection 33 + - Compatible with GraphiQL and other GraphQL clients 34 + 35 + ## Architecture 36 + 37 + The package is organized into several modules: 38 + 39 + - `graphql/lexer.gleam` - Tokenizes GraphQL query strings 40 + - `graphql/parser.gleam` - Parses tokens into an AST 41 + - `graphql/schema.gleam` - Schema definition and type system 42 + - `graphql/executor.gleam` - Query execution engine 43 + - `graphql/value.gleam` - GraphQL value types 44 + - `graphql/introspection.gleam` - Schema introspection 45 + 46 + ## Usage 47 + 48 + ### Defining a Schema 49 + 50 + ```gleam 51 + import graphql/schema 52 + import graphql/value 53 + 54 + // Define a simple User type 55 + let user_type = schema.object_type( 56 + "User", 57 + "A user in the system", 58 + [ 59 + schema.field("id", schema.id_type(), "User ID", fn(ctx) { 60 + // Extract id from context 61 + case ctx.data { 62 + option.Some(value.Object(fields)) -> { 63 + case list.key_find(fields, "id") { 64 + Ok(id_val) -> Ok(id_val) 
65 + Error(_) -> Ok(value.Null) 66 + } 67 + } 68 + _ -> Ok(value.Null) 69 + } 70 + }), 71 + schema.field("name", schema.string_type(), "User name", fn(ctx) { 72 + // Extract name from context 73 + // ... resolver implementation 74 + }), 75 + ] 76 + ) 77 + 78 + // Define root query type 79 + let query_type = schema.object_type( 80 + "Query", 81 + "Root query type", 82 + [ 83 + schema.field("user", user_type, "Get a user", fn(_ctx) { 84 + Ok(value.Object([ 85 + #("id", value.String("1")), 86 + #("name", value.String("Alice")), 87 + ])) 88 + }), 89 + ] 90 + ) 91 + 92 + // Create schema 93 + let my_schema = schema.new(query_type) 94 + ``` 95 + 96 + ### Executing Queries 97 + 98 + ```gleam 99 + import graphql/executor 100 + import graphql/schema 101 + 102 + let query = "{ user { id name } }" 103 + let result = executor.execute(query, my_schema, schema.Context(None)) 104 + 105 + case result { 106 + Ok(executor.Response(data: data, errors: [])) -> { 107 + // Query succeeded 108 + io.println("Data: " <> string.inspect(data)) 109 + } 110 + Ok(executor.Response(data: data, errors: errors)) -> { 111 + // Query executed with errors 112 + io.println("Data: " <> string.inspect(data)) 113 + io.println("Errors: " <> string.inspect(errors)) 114 + } 115 + Error(err) -> { 116 + // Query failed to parse or execute 117 + io.println("Error: " <> err) 118 + } 119 + } 120 + ``` 121 + 122 + ## Test Coverage 123 + 124 + The package includes tests covering: 125 + - Parsing 126 + - Execution 127 + - Schema 128 + - Introspection 129 + - Edge cases 130 + 131 + ## Known Limitations 132 + 133 + - Mutations not yet implemented 134 + - Subscriptions not yet implemented 135 + - Directives not yet implemented 136 + - Variables not yet implemented 137 + - Custom scalar types limited to built-in types 138 + 139 + ## Dependencies 140 + 141 + - `gleam_stdlib` >= 0.44.0 142 + 143 + ## Development 144 + 145 + Run tests: 146 + ```sh 147 + cd graphql 148 + gleam test 149 + ``` 150 + 151 + Build: 152 + ```sh 
153 + gleam build 154 + ```
+19
graphql/gleam.toml
··· 1 + name = "graphql" 2 + version = "1.0.0" 3 + 4 + # Fill out these fields if you intend to generate HTML documentation or publish 5 + # your project to the Hex package manager. 6 + # 7 + # description = "" 8 + # licences = ["Apache-2.0"] 9 + # repository = { type = "github", user = "", repo = "" } 10 + # links = [{ title = "Website", href = "" }] 11 + # 12 + # For a full reference of all the available options, you can have a look at 13 + # https://gleam.run/writing-gleam/gleam-toml/. 14 + 15 + [dependencies] 16 + gleam_stdlib = ">= 0.44.0 and < 2.0.0" 17 + 18 + [dev-dependencies] 19 + gleeunit = ">= 1.0.0 and < 2.0.0"
+11
graphql/manifest.toml
··· 1 + # This file was generated by Gleam 2 + # You typically do not need to edit this file 3 + 4 + packages = [ 5 + { name = "gleam_stdlib", version = "0.65.0", build_tools = ["gleam"], requirements = [], otp_app = "gleam_stdlib", source = "hex", outer_checksum = "7C69C71D8C493AE11A5184828A77110EB05A7786EBF8B25B36A72F879C3EE107" }, 6 + { name = "gleeunit", version = "1.7.0", build_tools = ["gleam"], requirements = ["gleam_stdlib"], otp_app = "gleeunit", source = "hex", outer_checksum = "CD701726CBCE5588B375D157B4391CFD0F2F134CD12D9B6998A395484DE05C58" }, 7 + ] 8 + 9 + [requirements] 10 + gleam_stdlib = { version = ">= 0.44.0 and < 2.0.0" } 11 + gleeunit = { version = ">= 1.0.0 and < 2.0.0" }
+620
graphql/src/graphql/executor.gleam
/// GraphQL Executor
///
/// Executes GraphQL queries against a schema: parses the query text,
/// resolves fields recursively, and collects field-level errors into
/// the response rather than aborting execution.
import gleam/dict.{type Dict}
import gleam/list
import gleam/option.{None, Some}
import graphql/introspection
import graphql/parser
import graphql/schema
import graphql/value

/// A single execution error, tagged with the response path at which it
/// occurred (innermost field first).
pub type GraphQLError {
  GraphQLError(message: String, path: List(String))
}

/// The overall result of executing a query: (possibly partial) data
/// plus every error collected along the way.
pub type Response {
  Response(data: value.Value, errors: List(GraphQLError))
}

/// Execute a GraphQL query string against a schema.
///
/// Returns `Error(String)` for parse failures or documents with no
/// executable operation; otherwise `Ok(Response)` whose `errors` list
/// carries any per-field failures encountered during resolution.
pub fn execute(
  query: String,
  graphql_schema: schema.Schema,
  ctx: schema.Context,
) -> Result(Response, String) {
  // Parse the query
  case parser.parse(query) {
    Error(parse_error) ->
      Error("Parse error: " <> format_parse_error(parse_error))
    Ok(document) -> {
      // Execute the document
      case execute_document(document, graphql_schema, ctx) {
        Ok(#(data, errors)) -> Ok(Response(data, errors))
        Error(err) -> Error(err)
      }
    }
  }
}

/// Render a parser error as a human-readable message.
fn format_parse_error(err: parser.ParseError) -> String {
  case err {
    parser.UnexpectedToken(_, msg) -> msg
    parser.UnexpectedEndOfInput(msg) -> msg
    parser.LexerError(_) -> "Lexer error"
  }
}

/// Execute a document: split out fragment definitions, then run the
/// first executable operation against the schema.
fn execute_document(
  document: parser.Document,
  graphql_schema: schema.Schema,
  ctx: schema.Context,
) -> Result(#(value.Value, List(GraphQLError)), String) {
  case document {
    parser.Document(operations) -> {
      // Separate fragments from executable operations
      let #(fragments, executable_ops) = partition_operations(operations)

      // Build fragments dictionary for spread lookup during execution
      let fragments_dict = build_fragments_dict(fragments)

      // Execute the first executable operation only (multi-operation
      // documents with operation selection are not supported).
      case executable_ops {
        [operation, ..] ->
          execute_operation(operation, graphql_schema, ctx, fragments_dict)
        [] -> Error("No executable operations in document")
      }
    }
  }
}

/// Partition operations into (fragment definitions, executable operations).
fn partition_operations(
  operations: List(parser.Operation),
) -> #(List(parser.Operation), List(parser.Operation)) {
  list.partition(operations, fn(op) {
    case op {
      parser.FragmentDefinition(_, _, _) -> True
      _ -> False
    }
  })
}

/// Build a dictionary of fragment definitions keyed by fragment name.
fn build_fragments_dict(
  fragments: List(parser.Operation),
) -> Dict(String, parser.Operation) {
  fragments
  |> list.filter_map(fn(frag) {
    case frag {
      parser.FragmentDefinition(name, _, _) -> Ok(#(name, frag))
      _ -> Error(Nil)
    }
  })
  |> dict.from_list
}

/// Execute a single operation. Only queries are supported; mutations
/// and bare fragment definitions are rejected.
fn execute_operation(
  operation: parser.Operation,
  graphql_schema: schema.Schema,
  ctx: schema.Context,
  fragments: Dict(String, parser.Operation),
) -> Result(#(value.Value, List(GraphQLError)), String) {
  let root_type = schema.query_type(graphql_schema)

  case operation {
    parser.Query(selection_set) ->
      execute_selection_set(
        selection_set,
        root_type,
        graphql_schema,
        ctx,
        fragments,
        [],
      )
    parser.NamedQuery(_, _, selection_set) ->
      execute_selection_set(
        selection_set,
        root_type,
        graphql_schema,
        ctx,
        fragments,
        [],
      )
    parser.Mutation(_) -> Error("Mutations not yet implemented")
    parser.NamedMutation(_, _, _) -> Error("Mutations not yet implemented")
    parser.FragmentDefinition(_, _, _) ->
      Error("Fragment definitions are not executable operations")
  }
}

/// Execute every selection in a set against `parent_type`, producing an
/// object value plus all accumulated errors.
fn execute_selection_set(
  selection_set: parser.SelectionSet,
  parent_type: schema.Type,
  graphql_schema: schema.Schema,
  ctx: schema.Context,
  fragments: Dict(String, parser.Operation),
  path: List(String),
) -> Result(#(value.Value, List(GraphQLError)), String) {
  case selection_set {
    parser.SelectionSet(selections) -> {
      let results =
        list.map(selections, fn(selection) {
          execute_selection(
            selection,
            parent_type,
            graphql_schema,
            ctx,
            fragments,
            path,
          )
        })

      // Collect all data and errors, merging fragment fields
      let #(data, errors) = collect_and_merge_fields(results)

      Ok(#(value.Object(data), errors))
    }
  }
}

/// Collect and merge fields from selection results.
///
/// Sentinel field names carry fragment semantics:
/// - "__fragment_fields": an object whose fields are spliced into the
///   parent object (fragment spread / inline fragment that matched).
/// - "__fragment_skip": a fragment whose type condition did not match;
///   contributes nothing.
///
/// FIX: selection-level failures (e.g. a spread of an undefined
/// fragment) previously hit `Error(_) -> acc` and were dropped
/// silently — the query appeared to succeed with no data and no error.
/// They are now reported as `GraphQLError`s in the response.
fn collect_and_merge_fields(
  results: List(Result(#(String, value.Value, List(GraphQLError)), String)),
) -> #(List(#(String, value.Value)), List(GraphQLError)) {
  let #(data, errors) =
    results
    |> list.fold(#([], []), fn(acc, r) {
      let #(fields_acc, errors_acc) = acc
      case r {
        Ok(#("__fragment_fields", value.Object(fragment_fields), errs)) -> {
          // Merge fragment fields into parent
          #(
            list.append(fields_acc, fragment_fields),
            list.append(errors_acc, errs),
          )
        }
        Ok(#("__fragment_skip", _, _errs)) -> {
          // Skip fragment that didn't match type condition
          acc
        }
        Ok(#(name, val, errs)) -> {
          // Regular field
          #(
            list.append(fields_acc, [#(name, val)]),
            list.append(errors_acc, errs),
          )
        }
        Error(err) -> {
          // Surface selection-level failures instead of swallowing them.
          #(fields_acc, list.append(errors_acc, [GraphQLError(err, [])]))
        }
      }
    })

  #(data, errors)
}

/// Execute one selection (field, fragment spread, or inline fragment).
///
/// Returns (field name, value, errors); fragment results use the
/// sentinel names documented on `collect_and_merge_fields`.
fn execute_selection(
  selection: parser.Selection,
  parent_type: schema.Type,
  graphql_schema: schema.Schema,
  ctx: schema.Context,
  fragments: Dict(String, parser.Operation),
  path: List(String),
) -> Result(#(String, value.Value, List(GraphQLError)), String) {
  case selection {
    parser.FragmentSpread(name) -> {
      // Look up the fragment definition
      case dict.get(fragments, name) {
        Error(_) -> Error("Fragment '" <> name <> "' not found")
        Ok(parser.FragmentDefinition(
          _fname,
          type_condition,
          fragment_selection_set,
        )) -> {
          // Check type condition against the current parent type
          let current_type_name = schema.type_name(parent_type)
          case type_condition == current_type_name {
            False -> {
              // Type condition doesn't match, skip this fragment
              Ok(#("__fragment_skip", value.Null, []))
            }
            True -> {
              // Type condition matches, execute fragment's selections
              case
                execute_selection_set(
                  fragment_selection_set,
                  parent_type,
                  graphql_schema,
                  ctx,
                  fragments,
                  path,
                )
              {
                Ok(#(value.Object(fields), errs)) -> {
                  // Fragment selections are merged into parent via sentinel
                  Ok(#("__fragment_fields", value.Object(fields), errs))
                }
                Ok(#(val, errs)) -> Ok(#("__fragment_fields", val, errs))
                Error(err) -> Error(err)
              }
            }
          }
        }
        Ok(_) -> Error("Invalid fragment definition")
      }
    }
    parser.InlineFragment(type_condition_opt, inline_selections) -> {
      // Check type condition if present; absent condition always matches
      let current_type_name = schema.type_name(parent_type)
      let should_execute = case type_condition_opt {
        None -> True
        Some(type_condition) -> type_condition == current_type_name
      }

      case should_execute {
        False -> Ok(#("__fragment_skip", value.Null, []))
        True -> {
          let inline_selection_set = parser.SelectionSet(inline_selections)
          case
            execute_selection_set(
              inline_selection_set,
              parent_type,
              graphql_schema,
              ctx,
              fragments,
              path,
            )
          {
            Ok(#(value.Object(fields), errs)) ->
              Ok(#("__fragment_fields", value.Object(fields), errs))
            Ok(#(val, errs)) -> Ok(#("__fragment_fields", val, errs))
            Error(err) -> Error(err)
          }
        }
      }
    }
    parser.Field(name, _alias, _arguments, nested_selections) -> {
      // Handle introspection meta-fields before schema lookup
      case name {
        "__typename" -> {
          let type_name = schema.type_name(parent_type)
          Ok(#("__typename", value.String(type_name), []))
        }
        "__schema" -> {
          let schema_value = introspection.schema_introspection(graphql_schema)
          // Handle nested selections on __schema
          case nested_selections {
            [] -> Ok(#("__schema", schema_value, []))
            _ -> {
              let selection_set = parser.SelectionSet(nested_selections)
              // There is no schema.Type for __Schema; selections are
              // resolved directly against the introspection value.
              case
                execute_introspection_selection_set(
                  selection_set,
                  schema_value,
                  graphql_schema,
                  ctx,
                  fragments,
                  ["__schema", ..path],
                )
              {
                Ok(#(nested_data, nested_errors)) ->
                  Ok(#("__schema", nested_data, nested_errors))
                Error(err) -> {
                  let error = GraphQLError(err, ["__schema", ..path])
                  Ok(#("__schema", value.Null, [error]))
                }
              }
            }
          }
        }
        _ -> {
          // Regular field: look it up on the parent type
          case schema.get_field(parent_type, name) {
            None -> {
              let error = GraphQLError("Field '" <> name <> "' not found", path)
              Ok(#(name, value.Null, [error]))
            }
            Some(field) -> {
              // Get the field's type for nested selections
              let field_type_def = schema.field_type(field)

              // Resolve the field
              case schema.resolve_field(field, ctx) {
                Error(err) -> {
                  let error = GraphQLError(err, [name, ..path])
                  Ok(#(name, value.Null, [error]))
                }
                Ok(field_value) -> {
                  // If there are nested selections, recurse
                  case nested_selections {
                    [] -> Ok(#(name, field_value, []))
                    _ -> {
                      // Need to resolve nested fields
                      case field_value {
                        value.Object(_) -> {
                          // Execute nested selections using the field's
                          // type, not the parent type
                          let selection_set =
                            parser.SelectionSet(nested_selections)
                          case
                            execute_selection_set(
                              selection_set,
                              field_type_def,
                              graphql_schema,
                              ctx,
                              fragments,
                              [name, ..path],
                            )
                          {
                            Ok(#(nested_data, nested_errors)) ->
                              Ok(#(name, nested_data, nested_errors))
                            Error(err) -> {
                              let error = GraphQLError(err, [name, ..path])
                              Ok(#(name, value.Null, [error]))
                            }
                          }
                        }
                        value.List(items) -> {
                          // Handle list with nested selections: unwrap
                          // the LIST wrapper to find the element type
                          let inner_type = case
                            schema.inner_type(field_type_def)
                          {
                            option.Some(t) -> t
                            option.None -> field_type_def
                          }

                          // Execute nested selections on each item
                          let selection_set =
                            parser.SelectionSet(nested_selections)
                          let results =
                            list.map(items, fn(item) {
                              // Create context with this item's data
                              let item_ctx = schema.Context(option.Some(item))
                              execute_selection_set(
                                selection_set,
                                inner_type,
                                graphql_schema,
                                item_ctx,
                                fragments,
                                [name, ..path],
                              )
                            })

                          // Collect results and errors
                          let processed_items =
                            results
                            |> list.filter_map(fn(r) {
                              case r {
                                Ok(#(val, _)) -> Ok(val)
                                Error(_) -> Error(Nil)
                              }
                            })

                          let all_errors =
                            results
                            |> list.flat_map(fn(r) {
                              case r {
                                Ok(#(_, errs)) -> errs
                                Error(_) -> []
                              }
                            })

                          Ok(#(name, value.List(processed_items), all_errors))
                        }
                        _ -> Ok(#(name, field_value, []))
                      }
                    }
                  }
                }
              }
            }
          }
        }
      }
    }
  }
}

/// Execute a selection set on an introspection value (like __schema).
///
/// Reads fields directly from the `value.Object` rather than using
/// resolvers; fragments always execute (no type-condition checks).
fn execute_introspection_selection_set(
  selection_set: parser.SelectionSet,
  value_obj: value.Value,
  graphql_schema: schema.Schema,
  ctx: schema.Context,
  fragments: Dict(String, parser.Operation),
  path: List(String),
) -> Result(#(value.Value, List(GraphQLError)), String) {
  case selection_set {
    parser.SelectionSet(selections) -> {
      case value_obj {
        value.List(items) -> {
          // For lists, execute the selection set on each item
          let results =
            list.map(items, fn(item) {
              execute_introspection_selection_set(
                selection_set,
                item,
                graphql_schema,
                ctx,
                fragments,
                path,
              )
            })

          // Collect the data and errors
          let data_items =
            results
            |> list.filter_map(fn(r) {
              case r {
                Ok(#(val, _)) -> Ok(val)
                Error(_) -> Error(Nil)
              }
            })

          let all_errors =
            results
            |> list.flat_map(fn(r) {
              case r {
                Ok(#(_, errs)) -> errs
                Error(_) -> []
              }
            })

          Ok(#(value.List(data_items), all_errors))
        }
        value.Null -> {
          // Null propagates regardless of selections; this handles
          // mutationType and subscriptionType which are null here.
          Ok(#(value.Null, []))
        }
        value.Object(fields) -> {
          // For each selection, find the corresponding field in the object
          let results =
            list.map(selections, fn(selection) {
              case selection {
                parser.FragmentSpread(name) -> {
                  // Look up the fragment definition
                  case dict.get(fragments, name) {
                    // Fragment not found: skip it
                    Error(_) -> Error(Nil)
                    Ok(parser.FragmentDefinition(
                      _fname,
                      _type_condition,
                      fragment_selection_set,
                    )) -> {
                      // Introspection ignores type conditions — just
                      // execute the fragment's selections.
                      case
                        execute_introspection_selection_set(
                          fragment_selection_set,
                          value_obj,
                          graphql_schema,
                          ctx,
                          fragments,
                          path,
                        )
                      {
                        Ok(#(value.Object(fragment_fields), errs)) ->
                          Ok(#(
                            "__fragment_fields",
                            value.Object(fragment_fields),
                            errs,
                          ))
                        Ok(#(val, errs)) ->
                          Ok(#("__fragment_fields", val, errs))
                        Error(_err) -> Error(Nil)
                      }
                    }
                    // Invalid fragment definition
                    Ok(_) -> Error(Nil)
                  }
                }
                parser.InlineFragment(_type_condition_opt, inline_selections) -> {
                  // Inline fragments always execute for introspection
                  let inline_selection_set =
                    parser.SelectionSet(inline_selections)
                  case
                    execute_introspection_selection_set(
                      inline_selection_set,
                      value_obj,
                      graphql_schema,
                      ctx,
                      fragments,
                      path,
                    )
                  {
                    Ok(#(value.Object(fragment_fields), errs)) ->
                      // Return fragment fields to be merged
                      Ok(#(
                        "__fragment_fields",
                        value.Object(fragment_fields),
                        errs,
                      ))
                    Ok(#(val, errs)) -> Ok(#("__fragment_fields", val, errs))
                    Error(_err) -> Error(Nil)
                  }
                }
                parser.Field(name, _alias, _arguments, nested_selections) -> {
                  // Find the field in the object
                  case list.key_find(fields, name) {
                    Ok(field_value) -> {
                      // Handle nested selections
                      case nested_selections {
                        [] -> Ok(#(name, field_value, []))
                        _ -> {
                          let selection_set =
                            parser.SelectionSet(nested_selections)
                          case
                            execute_introspection_selection_set(
                              selection_set,
                              field_value,
                              graphql_schema,
                              ctx,
                              fragments,
                              [name, ..path],
                            )
                          {
                            Ok(#(nested_data, nested_errors)) ->
                              Ok(#(name, nested_data, nested_errors))
                            Error(err) -> {
                              let error = GraphQLError(err, [name, ..path])
                              Ok(#(name, value.Null, [error]))
                            }
                          }
                        }
                      }
                    }
                    Error(_) -> {
                      let error =
                        GraphQLError("Field '" <> name <> "' not found", path)
                      Ok(#(name, value.Null, [error]))
                    }
                  }
                }
              }
            })

          // Collect all data and errors, merging fragment fields
          let #(data, errors) =
            results
            |> list.fold(#([], []), fn(acc, r) {
              let #(fields_acc, errors_acc) = acc
              case r {
                Ok(#("__fragment_fields", value.Object(fragment_fields), errs)) -> {
                  // Merge fragment fields into parent
                  #(
                    list.append(fields_acc, fragment_fields),
                    list.append(errors_acc, errs),
                  )
                }
                Ok(#(name, val, errs)) -> {
                  // Regular field
                  #(
                    list.append(fields_acc, [#(name, val)]),
                    list.append(errors_acc, errs),
                  )
                }
                // Error(Nil) here means "fragment skipped"; intentional
                Error(_) -> acc
              }
            })

          Ok(#(value.Object(data), errors))
        }
        _ ->
          Error(
            "Expected object, list, or null for introspection selection set",
          )
      }
    }
  }
}
+199
graphql/src/graphql/introspection.gleam
/// GraphQL Introspection
///
/// Implements the GraphQL introspection system per the GraphQL spec.
/// Provides __schema, __type, and __typename meta-fields.
import gleam/list
import gleam/option
import graphql/schema
import graphql/value

/// Build introspection value for __schema
pub fn schema_introspection(graphql_schema: schema.Schema) -> value.Value {
  let query_type = schema.query_type(graphql_schema)

  // Build list of all types in the schema
  let all_types = get_all_types(graphql_schema)

  value.Object([
    #("queryType", type_ref(query_type)),
    // Mutations and subscriptions are not supported yet, so both are null
    #("mutationType", value.Null),
    #("subscriptionType", value.Null),
    #("types", value.List(all_types)),
    #("directives", value.List([])),
  ])
}

/// Get all types from the schema, deduplicated by type name, with any
/// missing built-in scalars appended.
fn get_all_types(graphql_schema: schema.Schema) -> List(value.Value) {
  let query_type = schema.query_type(graphql_schema)

  // Collect all types by traversing the schema
  let collected_types = collect_types_from_type(query_type, [])

  // Deduplicate by type name, keeping first occurrence.
  // FIX: previously used `list.zip(names, types) |> list.unique`, which
  // compares whole schema.Type values — including resolver closures,
  // whose equality is unreliable on the BEAM. The type name is the
  // intended identity, so dedupe on that alone.
  let unique_types =
    collected_types
    |> list.fold([], fn(acc, t) {
      let name = schema.type_name(t)
      case list.any(acc, fn(seen) { schema.type_name(seen) == name }) {
        True -> acc
        False -> [t, ..acc]
      }
    })
    |> list.reverse

  // Add any built-in scalars that aren't already in the list
  let all_built_ins = [
    schema.string_type(),
    schema.int_type(),
    schema.float_type(),
    schema.boolean_type(),
    schema.id_type(),
  ]

  let collected_names = list.map(unique_types, schema.type_name)
  let missing_built_ins =
    list.filter(all_built_ins, fn(built_in) {
      let built_in_name = schema.type_name(built_in)
      !list.contains(collected_names, built_in_name)
    })

  let all_types = list.append(unique_types, missing_built_ins)

  // Convert all types to introspection values
  list.map(all_types, type_introspection)
}

/// Collect all types referenced in a type (recursively), keyed by name
/// so each named type is visited at most once.
fn collect_types_from_type(
  t: schema.Type,
  acc: List(schema.Type),
) -> List(schema.Type) {
  case
    list.any(acc, fn(existing) {
      schema.type_name(existing) == schema.type_name(t)
    })
  {
    // Already collected this type
    True -> acc
    False -> {
      let new_acc = [t, ..acc]

      // Recursively collect types from fields if this is an object type
      case schema.is_object(t) {
        True -> {
          let fields = schema.get_fields(t)
          list.fold(fields, new_acc, fn(acc2, field) {
            let field_type = schema.field_type(field)
            collect_types_from_type_deep(field_type, acc2)
          })
        }
        False -> {
          // Check if it's a wrapping type (List or NonNull)
          case schema.inner_type(t) {
            option.Some(inner) -> collect_types_from_type_deep(inner, new_acc)
            option.None -> new_acc
          }
        }
      }
    }
  }
}

/// Helper to unwrap LIST and NON_NULL wrappers and collect the inner type.
fn collect_types_from_type_deep(
  t: schema.Type,
  acc: List(schema.Type),
) -> List(schema.Type) {
  // Check if this is a wrapping type (List or NonNull)
  case schema.inner_type(t) {
    option.Some(inner) -> collect_types_from_type_deep(inner, acc)
    option.None -> collect_types_from_type(t, acc)
  }
}

/// Build full type introspection value (the __Type object).
fn type_introspection(t: schema.Type) -> value.Value {
  let kind = schema.type_kind(t)
  let type_name = schema.type_name(t)

  // Get inner type for LIST and NON_NULL
  let of_type = case schema.inner_type(t) {
    option.Some(inner) -> type_ref(inner)
    option.None -> value.Null
  }

  // Only OBJECT types expose a fields list; everything else is null
  let fields = case kind {
    "OBJECT" -> value.List(get_fields_for_type(t))
    _ -> value.Null
  }

  // Wrapping types (LIST/NON_NULL) have no name per the spec
  let name = case kind {
    "LIST" -> value.Null
    "NON_NULL" -> value.Null
    _ -> value.String(type_name)
  }

  value.Object([
    #("kind", value.String(kind)),
    #("name", name),
    #("description", value.Null),
    #("fields", fields),
    #("interfaces", value.List([])),
    #("possibleTypes", value.Null),
    #("enumValues", value.Null),
    #("inputFields", value.Null),
    #("ofType", of_type),
  ])
}

/// Get fields for a type (if it's an object type).
fn get_fields_for_type(t: schema.Type) -> List(value.Value) {
  let fields = schema.get_fields(t)

  list.map(fields, fn(field) {
    let field_type_val = schema.field_type(field)
    let args = schema.field_arguments(field)

    value.Object([
      #("name", value.String(schema.field_name(field))),
      #("description", value.String(schema.field_description(field))),
      #("args", value.List(list.map(args, argument_introspection))),
      #("type", type_ref(field_type_val)),
      #("isDeprecated", value.Boolean(False)),
      #("deprecationReason", value.Null),
    ])
  })
}

/// Build introspection for an argument (the __InputValue object).
fn argument_introspection(arg: schema.Argument) -> value.Value {
  value.Object([
    #("name", value.String(schema.argument_name(arg))),
    #("description", value.String(schema.argument_description(arg))),
    #("type", type_ref(schema.argument_type(arg))),
    #("defaultValue", value.Null),
  ])
}

/// Build a type reference (simplified type_introspection for field types).
fn type_ref(t: schema.Type) -> value.Value {
  let kind = schema.type_kind(t)
  let type_name = schema.type_name(t)

  // Get inner type for LIST and NON_NULL
  let of_type = case schema.inner_type(t) {
    option.Some(inner) -> type_ref(inner)
    option.None -> value.Null
  }

  // Wrapping types (LIST/NON_NULL) have no name per the spec
  let name = case kind {
    "LIST" -> value.Null
    "NON_NULL" -> value.Null
    _ -> value.String(type_name)
  }

  value.Object([
    #("kind", value.String(kind)),
    #("name", name),
    #("ofType", of_type),
  ])
}
+299
graphql/src/graphql/lexer.gleam
/// GraphQL Lexer - Tokenization
///
/// Per GraphQL spec Section 2 - Language
/// Converts source text into a sequence of lexical tokens.
/// Error positions are 0-based grapheme offsets into the source.
import gleam/list
import gleam/result
import gleam/string

/// GraphQL token types
pub type Token {
  // Punctuators
  BraceOpen
  BraceClose
  ParenOpen
  ParenClose
  BracketOpen
  BracketClose
  Colon
  Comma
  Pipe
  Equals
  At
  Dollar
  Spread

  // Values
  Name(String)
  Int(String)
  Float(String)
  String(String)

  // Ignored tokens (kept for optional whitespace preservation)
  Whitespace
  Comment(String)
}

pub type LexerError {
  UnexpectedCharacter(String, Int)
  UnterminatedString(Int)
  InvalidNumber(String, Int)
}

/// Tokenize a GraphQL source string into a list of tokens
///
/// Filters out whitespace and comments by default. Returns a LexerError
/// carrying the grapheme offset of the offending input on failure.
pub fn tokenize(source: String) -> Result(List(Token), LexerError) {
  source
  |> string.to_graphemes
  |> tokenize_graphemes([], 0)
  |> result.map(filter_ignored)
}

/// Internal: Tokenize graphemes recursively
///
/// `pos` is the offset of the next unread grapheme and is advanced by the
/// number of graphemes each token actually consumes, so that error
/// positions stay accurate for multi-grapheme tokens (comments, strings,
/// numbers, names).
fn tokenize_graphemes(
  graphemes: List(String),
  acc: List(Token),
  pos: Int,
) -> Result(List(Token), LexerError) {
  case graphemes {
    [] -> Ok(list.reverse(acc))

    // Whitespace. Note: string.to_graphemes clusters "\r\n" into a single
    // grapheme (Unicode grapheme-cluster rules), so CRLF needs its own
    // pattern or Windows line endings fail with UnexpectedCharacter.
    ["\r\n", ..rest] -> tokenize_graphemes(rest, [Whitespace, ..acc], pos + 2)
    [" ", ..rest] | ["\t", ..rest] | ["\n", ..rest] | ["\r", ..rest] ->
      tokenize_graphemes(rest, [Whitespace, ..acc], pos + 1)

    // Comments: '#' up to (not including) the line terminator
    ["#", ..rest] -> {
      let #(comment, remaining) = take_until_newline(rest)
      let consumed = 1 + string.length(comment)
      tokenize_graphemes(remaining, [Comment(comment), ..acc], pos + consumed)
    }

    // Punctuators (single grapheme each)
    ["{", ..rest] -> tokenize_graphemes(rest, [BraceOpen, ..acc], pos + 1)
    ["}", ..rest] -> tokenize_graphemes(rest, [BraceClose, ..acc], pos + 1)
    ["(", ..rest] -> tokenize_graphemes(rest, [ParenOpen, ..acc], pos + 1)
    [")", ..rest] -> tokenize_graphemes(rest, [ParenClose, ..acc], pos + 1)
    ["[", ..rest] -> tokenize_graphemes(rest, [BracketOpen, ..acc], pos + 1)
    ["]", ..rest] -> tokenize_graphemes(rest, [BracketClose, ..acc], pos + 1)
    [":", ..rest] -> tokenize_graphemes(rest, [Colon, ..acc], pos + 1)
    [",", ..rest] -> tokenize_graphemes(rest, [Comma, ..acc], pos + 1)
    ["|", ..rest] -> tokenize_graphemes(rest, [Pipe, ..acc], pos + 1)
    ["=", ..rest] -> tokenize_graphemes(rest, [Equals, ..acc], pos + 1)
    ["@", ..rest] -> tokenize_graphemes(rest, [At, ..acc], pos + 1)
    ["$", ..rest] -> tokenize_graphemes(rest, [Dollar, ..acc], pos + 1)

    // Spread (...)
    [".", ".", ".", ..rest] ->
      tokenize_graphemes(rest, [Spread, ..acc], pos + 3)

    // Strings: opening quote, contents (with escapes), closing quote
    ["\"", ..rest] -> {
      case take_string(rest, [], pos) {
        Ok(#(str, remaining, consumed)) ->
          // +1 for the opening quote; `consumed` covers contents + closer
          tokenize_graphemes(
            remaining,
            [String(str), ..acc],
            pos + 1 + consumed,
          )
        Error(err) -> Error(err)
      }
    }

    // Numbers (Int or Float) - check for minus or digits
    ["-", ..]
    | ["0", ..]
    | ["1", ..]
    | ["2", ..]
    | ["3", ..]
    | ["4", ..]
    | ["5", ..]
    | ["6", ..]
    | ["7", ..]
    | ["8", ..]
    | ["9", ..] -> {
      case take_number(graphemes) {
        Ok(#(num_str, is_float, remaining)) -> {
          let token = case is_float {
            True -> Float(num_str)
            False -> Int(num_str)
          }
          tokenize_graphemes(
            remaining,
            [token, ..acc],
            pos + string.length(num_str),
          )
        }
        Error(err) -> Error(err)
      }
    }

    // Names (identifiers) - must start with letter or underscore
    [char, ..] -> {
      case is_name_start(char) {
        True -> {
          let #(name, remaining) = take_name(graphemes)
          tokenize_graphemes(
            remaining,
            [Name(name), ..acc],
            pos + string.length(name),
          )
        }
        False -> Error(UnexpectedCharacter(char, pos))
      }
    }
  }
}

/// Take characters until newline (CR, LF, or a clustered CRLF grapheme)
fn take_until_newline(graphemes: List(String)) -> #(String, List(String)) {
  let #(chars, rest) =
    take_while(graphemes, fn(c) { c != "\n" && c != "\r" && c != "\r\n" })
  #(string.concat(chars), rest)
}

/// Take string contents (handles escapes)
///
/// Returns the decoded string, the remaining graphemes after the closing
/// quote, and the number of source graphemes consumed (contents plus the
/// closing quote). `start_pos` is the offset of the opening quote and is
/// reported when the string never terminates.
fn take_string(
  graphemes: List(String),
  acc: List(String),
  start_pos: Int,
) -> Result(#(String, List(String), Int), LexerError) {
  do_take_string(graphemes, acc, start_pos, 0)
}

fn do_take_string(
  graphemes: List(String),
  acc: List(String),
  start_pos: Int,
  consumed: Int,
) -> Result(#(String, List(String), Int), LexerError) {
  case graphemes {
    [] -> Error(UnterminatedString(start_pos))

    ["\"", ..rest] ->
      Ok(#(string.concat(list.reverse(acc)), rest, consumed + 1))

    // Escape sequences consume two source graphemes but yield one
    ["\\", "n", ..rest] ->
      do_take_string(rest, ["\n", ..acc], start_pos, consumed + 2)
    ["\\", "r", ..rest] ->
      do_take_string(rest, ["\r", ..acc], start_pos, consumed + 2)
    ["\\", "t", ..rest] ->
      do_take_string(rest, ["\t", ..acc], start_pos, consumed + 2)
    ["\\", "\"", ..rest] ->
      do_take_string(rest, ["\"", ..acc], start_pos, consumed + 2)
    ["\\", "\\", ..rest] ->
      do_take_string(rest, ["\\", ..acc], start_pos, consumed + 2)

    [char, ..rest] -> do_take_string(rest, [char, ..acc], start_pos, consumed + 1)
  }
}

/// Take a number (int or float)
///
/// Greedily consumes digit/'.'/'e'/'E'/'-'/'+' graphemes; classification
/// as Float is based on the presence of '.' or an exponent marker.
fn take_number(
  graphemes: List(String),
) -> Result(#(String, Bool, List(String)), LexerError) {
  let #(num_chars, rest) = take_while(graphemes, is_number_char)
  let num_str = string.concat(num_chars)

  let is_float =
    string.contains(num_str, ".")
    || string.contains(num_str, "e")
    || string.contains(num_str, "E")

  Ok(#(num_str, is_float, rest))
}

/// Take a name (identifier)
fn take_name(graphemes: List(String)) -> #(String, List(String)) {
  let #(name_chars, rest) = take_while(graphemes, is_name_char)
  #(string.concat(name_chars), rest)
}

/// Take characters while predicate is true
fn take_while(
  graphemes: List(String),
  predicate: fn(String) -> Bool,
) -> #(List(String), List(String)) {
  do_take_while(graphemes, predicate, [])
}

fn do_take_while(
  graphemes: List(String),
  predicate: fn(String) -> Bool,
  acc: List(String),
) -> #(List(String), List(String)) {
  case graphemes {
    [char, ..rest] -> {
      case predicate(char) {
        True -> do_take_while(rest, predicate, [char, ..acc])
        False -> #(list.reverse(acc), graphemes)
      }
    }
    _ -> #(list.reverse(acc), graphemes)
  }
}

/// Check if character can start a name (letter or underscore)
fn is_name_start(char: String) -> Bool {
  case char {
    "a"
    | "b"
    | "c"
    | "d"
    | "e"
    | "f"
    | "g"
    | "h"
    | "i"
    | "j"
    | "k"
    | "l"
    | "m"
    | "n"
    | "o"
    | "p"
    | "q"
    | "r"
    | "s"
    | "t"
    | "u"
    | "v"
    | "w"
    | "x"
    | "y"
    | "z" -> True
    "A"
    | "B"
    | "C"
    | "D"
    | "E"
    | "F"
    | "G"
    | "H"
    | "I"
    | "J"
    | "K"
    | "L"
    | "M"
    | "N"
    | "O"
    | "P"
    | "Q"
    | "R"
    | "S"
    | "T"
    | "U"
    | "V"
    | "W"
    | "X"
    | "Y"
    | "Z" -> True
    "_" -> True
    _ -> False
  }
}

/// Check if character can be part of a name
fn is_name_char(char: String) -> Bool {
  is_name_start(char) || is_digit(char)
}

/// Check if character is a digit
fn is_digit(char: String) -> Bool {
  case char {
    "0" | "1" | "2" | "3" | "4" | "5" | "6" | "7" | "8" | "9" -> True
    _ -> False
  }
}

/// Check if character can be part of a number
fn is_number_char(char: String) -> Bool {
  is_digit(char)
  || char == "."
  || char == "e"
  || char == "E"
  || char == "-"
  || char == "+"
}

/// Filter out ignored tokens (whitespace and comments)
fn filter_ignored(tokens: List(Token)) -> List(Token) {
  list.filter(tokens, fn(token) {
    case token {
      Whitespace -> False
      Comment(_) -> False
      _ -> True
    }
  })
}
+336
graphql/src/graphql/parser.gleam
/// GraphQL Parser - Build AST from tokens
///
/// Per GraphQL spec Section 2 - Language
/// Converts a token stream into an Abstract Syntax Tree
import gleam/list
import gleam/option.{type Option, None, Some}
import gleam/result
import graphql/lexer

/// GraphQL Document (top-level)
pub type Document {
  Document(operations: List(Operation))
}

/// GraphQL Operation
pub type Operation {
  Query(SelectionSet)
  NamedQuery(name: String, variables: List(Variable), selections: SelectionSet)
  Mutation(SelectionSet)
  NamedMutation(
    name: String,
    variables: List(Variable),
    selections: SelectionSet,
  )
  FragmentDefinition(
    name: String,
    type_condition: String,
    selections: SelectionSet,
  )
}

/// Selection Set (list of fields)
pub type SelectionSet {
  SelectionSet(selections: List(Selection))
}

/// Selection (field or fragment)
pub type Selection {
  Field(
    name: String,
    alias: Option(String),
    arguments: List(Argument),
    selections: List(Selection),
  )
  FragmentSpread(name: String)
  InlineFragment(type_condition: Option(String), selections: List(Selection))
}

/// Argument (name: value)
pub type Argument {
  Argument(name: String, value: ArgumentValue)
}

/// Argument value types
pub type ArgumentValue {
  IntValue(String)
  FloatValue(String)
  StringValue(String)
  BooleanValue(Bool)
  NullValue
  ListValue(List(ArgumentValue))
  ObjectValue(List(#(String, ArgumentValue)))
  VariableValue(String)
}

/// Variable definition
pub type Variable {
  Variable(name: String, type_: String)
}

pub type ParseError {
  UnexpectedToken(lexer.Token, String)
  UnexpectedEndOfInput(String)
  LexerError(lexer.LexerError)
}

/// Parse a GraphQL query string into a Document
pub fn parse(source: String) -> Result(Document, ParseError) {
  source
  |> lexer.tokenize
  |> result.map_error(LexerError)
  |> result.try(parse_document)
}

/// Parse tokens into a Document
fn parse_document(tokens: List(lexer.Token)) -> Result(Document, ParseError) {
  case tokens {
    [] -> Error(UnexpectedEndOfInput("Expected query or operation"))
    _ -> {
      case parse_operations(tokens, []) {
        Ok(#(operations, _remaining)) -> Ok(Document(operations))
        Error(err) -> Error(err)
      }
    }
  }
}

/// Parse operations (queries/mutations/fragments)
///
/// Supports named and anonymous forms of both `query` and `mutation`
/// (variable definitions are not parsed yet — NamedQuery/NamedMutation
/// carry an empty variable list).
fn parse_operations(
  tokens: List(lexer.Token),
  acc: List(Operation),
) -> Result(#(List(Operation), List(lexer.Token)), ParseError) {
  case tokens {
    [] -> Ok(#(list.reverse(acc), []))

    // Named query: "query Name { ... }"
    [lexer.Name("query"), lexer.Name(name), ..rest] -> {
      case parse_selection_set(rest) {
        Ok(#(selections, remaining)) -> {
          let op = NamedQuery(name, [], selections)
          parse_operations(remaining, [op, ..acc])
        }
        Error(err) -> Error(err)
      }
    }

    // Anonymous keyword query: "query { ... }"
    [lexer.Name("query"), ..rest] -> {
      case parse_selection_set(rest) {
        Ok(#(selections, remaining)) ->
          parse_operations(remaining, [Query(selections), ..acc])
        Error(err) -> Error(err)
      }
    }

    // Named mutation: "mutation Name { ... }"
    [lexer.Name("mutation"), lexer.Name(name), ..rest] -> {
      case parse_selection_set(rest) {
        Ok(#(selections, remaining)) -> {
          let op = NamedMutation(name, [], selections)
          parse_operations(remaining, [op, ..acc])
        }
        Error(err) -> Error(err)
      }
    }

    // Anonymous mutation: "mutation { ... }"
    [lexer.Name("mutation"), ..rest] -> {
      case parse_selection_set(rest) {
        Ok(#(selections, remaining)) ->
          parse_operations(remaining, [Mutation(selections), ..acc])
        Error(err) -> Error(err)
      }
    }

    // Fragment definition: "fragment Name on Type { ... }"
    [
      lexer.Name("fragment"),
      lexer.Name(name),
      lexer.Name("on"),
      lexer.Name(type_condition),
      ..rest
    ] -> {
      case parse_selection_set(rest) {
        Ok(#(selections, remaining)) -> {
          let op = FragmentDefinition(name, type_condition, selections)
          parse_operations(remaining, [op, ..acc])
        }
        Error(err) -> Error(err)
      }
    }

    // Anonymous query: "{ ... }"
    [lexer.BraceOpen, ..] -> {
      case parse_selection_set(tokens) {
        Ok(#(selections, remaining)) -> {
          let op = Query(selections)
          // A bare selection set terminates the document unless earlier
          // operations (e.g. fragment definitions) preceded it.
          case acc {
            [] -> Ok(#([op], remaining))
            _ -> parse_operations(remaining, [op, ..acc])
          }
        }
        Error(err) -> Error(err)
      }
    }

    // Any other token: done if operations were parsed, otherwise an error
    [token, ..] -> {
      case acc {
        [] ->
          Error(UnexpectedToken(
            token,
            "Expected query, mutation, fragment, or '{'",
          ))
        _ -> Ok(#(list.reverse(acc), tokens))
      }
    }
  }
}

/// Parse selection set: { field1 field2 ... }
fn parse_selection_set(
  tokens: List(lexer.Token),
) -> Result(#(SelectionSet, List(lexer.Token)), ParseError) {
  case tokens {
    [lexer.BraceOpen, ..rest] -> {
      case parse_selections(rest, []) {
        Ok(#(selections, [lexer.BraceClose, ..remaining])) ->
          Ok(#(SelectionSet(selections), remaining))
        Ok(#(_, _remaining)) ->
          Error(UnexpectedEndOfInput("Expected '}' to close selection set"))
        Error(err) -> Error(err)
      }
    }
    [token, ..] -> Error(UnexpectedToken(token, "Expected '{'"))
    [] -> Error(UnexpectedEndOfInput("Expected '{'"))
  }
}

/// Parse selections (fields and fragments)
fn parse_selections(
  tokens: List(lexer.Token),
  acc: List(Selection),
) -> Result(#(List(Selection), List(lexer.Token)), ParseError) {
  case tokens {
    // End of selection set
    [lexer.BraceClose, ..] -> Ok(#(list.reverse(acc), tokens))

    // Inline fragment: "... on Type { ... }" - check BEFORE fragment spread
    [lexer.Spread, lexer.Name("on"), lexer.Name(type_condition), ..rest] -> {
      case parse_selection_set(rest) {
        Ok(#(SelectionSet(selections), remaining)) -> {
          let inline = InlineFragment(Some(type_condition), selections)
          parse_selections(remaining, [inline, ..acc])
        }
        Error(err) -> Error(err)
      }
    }

    // Fragment spread: "...FragmentName"
    [lexer.Spread, lexer.Name(name), ..rest] -> {
      let spread = FragmentSpread(name)
      parse_selections(rest, [spread, ..acc])
    }

    // Field
    [lexer.Name(name), ..rest] -> {
      case parse_field(name, rest) {
        Ok(#(field, remaining)) -> {
          parse_selections(remaining, [field, ..acc])
        }
        Error(err) -> Error(err)
      }
    }

    [] -> Error(UnexpectedEndOfInput("Expected field or '}'"))
    [token, ..] ->
      Error(UnexpectedToken(token, "Expected field name or fragment"))
  }
}

/// Parse a field with optional arguments and nested selections
///
/// A malformed argument list is a hard error; previously it was silently
/// treated as "no arguments", which only deferred the failure to a more
/// confusing error later in the stream.
fn parse_field(
  name: String,
  tokens: List(lexer.Token),
) -> Result(#(Selection, List(lexer.Token)), ParseError) {
  // parse_arguments returns ([], tokens) unchanged when no '(' is present
  use #(arguments, after_args) <- result.try(parse_arguments(tokens))

  // Parse nested selection set if present
  case after_args {
    [lexer.BraceOpen, ..] -> {
      case parse_nested_selections(after_args) {
        Ok(#(nested, remaining)) ->
          Ok(#(Field(name, None, arguments, nested), remaining))
        Error(err) -> Error(err)
      }
    }
    _ -> Ok(#(Field(name, None, arguments, []), after_args))
  }
}

/// Parse nested selections for a field
fn parse_nested_selections(
  tokens: List(lexer.Token),
) -> Result(#(List(Selection), List(lexer.Token)), ParseError) {
  case tokens {
    [lexer.BraceOpen, ..rest] -> {
      case parse_selections(rest, []) {
        Ok(#(selections, [lexer.BraceClose, ..remaining])) ->
          Ok(#(selections, remaining))
        Ok(#(_, _remaining)) ->
          Error(UnexpectedEndOfInput(
            "Expected '}' to close nested selection set",
          ))
        Error(err) -> Error(err)
      }
    }
    _ -> Ok(#([], tokens))
  }
}

/// Parse arguments: (arg1: value1, arg2: value2)
///
/// Returns ([], tokens) untouched when the next token is not '('.
fn parse_arguments(
  tokens: List(lexer.Token),
) -> Result(#(List(Argument), List(lexer.Token)), ParseError) {
  case tokens {
    [lexer.ParenOpen, ..rest] -> {
      case parse_argument_list(rest, []) {
        Ok(#(args, [lexer.ParenClose, ..remaining])) -> Ok(#(args, remaining))
        Ok(#(_, _remaining)) ->
          Error(UnexpectedEndOfInput("Expected ')' to close arguments"))
        Error(err) -> Error(err)
      }
    }
    _ -> Ok(#([], tokens))
  }
}

/// Parse list of arguments
fn parse_argument_list(
  tokens: List(lexer.Token),
  acc: List(Argument),
) -> Result(#(List(Argument), List(lexer.Token)), ParseError) {
  case tokens {
    // End of arguments
    [lexer.ParenClose, ..] -> Ok(#(list.reverse(acc), tokens))

    // Argument: name: value
    [lexer.Name(name), lexer.Colon, ..rest] -> {
      case parse_argument_value(rest) {
        Ok(#(value, remaining)) -> {
          let arg = Argument(name, value)
          // Skip optional comma
          let after_comma = case remaining {
            [lexer.Comma, ..r] -> r
            _ -> remaining
          }
          parse_argument_list(after_comma, [arg, ..acc])
        }
        Error(err) -> Error(err)
      }
    }

    [] -> Error(UnexpectedEndOfInput("Expected argument or ')'"))
    [token, ..] -> Error(UnexpectedToken(token, "Expected argument name"))
  }
}

/// Parse argument value
///
/// Handles scalars, variables, and (newly) list and object literals, so
/// the ListValue/ObjectValue AST variants are reachable.
fn parse_argument_value(
  tokens: List(lexer.Token),
) -> Result(#(ArgumentValue, List(lexer.Token)), ParseError) {
  case tokens {
    [lexer.Int(val), ..rest] -> Ok(#(IntValue(val), rest))
    [lexer.Float(val), ..rest] -> Ok(#(FloatValue(val), rest))
    [lexer.String(val), ..rest] -> Ok(#(StringValue(val), rest))
    [lexer.Name("true"), ..rest] -> Ok(#(BooleanValue(True), rest))
    [lexer.Name("false"), ..rest] -> Ok(#(BooleanValue(False), rest))
    [lexer.Name("null"), ..rest] -> Ok(#(NullValue, rest))
    [lexer.Dollar, lexer.Name(name), ..rest] -> Ok(#(VariableValue(name), rest))
    // List value: [v1, v2, ...]
    [lexer.BracketOpen, ..rest] -> parse_list_value(rest, [])
    // Object value: { key: value, ... }
    [lexer.BraceOpen, ..rest] -> parse_object_value(rest, [])
    [] -> Error(UnexpectedEndOfInput("Expected value"))
    [token, ..] -> Error(UnexpectedToken(token, "Expected value"))
  }
}

/// Parse the elements of a list literal up to the closing ']'
fn parse_list_value(
  tokens: List(lexer.Token),
  acc: List(ArgumentValue),
) -> Result(#(ArgumentValue, List(lexer.Token)), ParseError) {
  case tokens {
    [lexer.BracketClose, ..rest] -> Ok(#(ListValue(list.reverse(acc)), rest))
    [] -> Error(UnexpectedEndOfInput("Expected value or ']'"))
    _ -> {
      case parse_argument_value(tokens) {
        Ok(#(value, remaining)) -> {
          // Commas between elements are optional per the GraphQL grammar
          let after_comma = case remaining {
            [lexer.Comma, ..r] -> r
            _ -> remaining
          }
          parse_list_value(after_comma, [value, ..acc])
        }
        Error(err) -> Error(err)
      }
    }
  }
}

/// Parse the fields of an object literal up to the closing '}'
fn parse_object_value(
  tokens: List(lexer.Token),
  acc: List(#(String, ArgumentValue)),
) -> Result(#(ArgumentValue, List(lexer.Token)), ParseError) {
  case tokens {
    [lexer.BraceClose, ..rest] -> Ok(#(ObjectValue(list.reverse(acc)), rest))
    [lexer.Name(key), lexer.Colon, ..rest] -> {
      case parse_argument_value(rest) {
        Ok(#(value, remaining)) -> {
          // Commas between fields are optional per the GraphQL grammar
          let after_comma = case remaining {
            [lexer.Comma, ..r] -> r
            _ -> remaining
          }
          parse_object_value(after_comma, [#(key, value), ..acc])
        }
        Error(err) -> Error(err)
      }
    }
    [] -> Error(UnexpectedEndOfInput("Expected object field or '}'"))
    [token, ..] -> Error(UnexpectedToken(token, "Expected object field name"))
  }
}
+294
graphql/src/graphql/schema.gleam
/// GraphQL Schema - Type System
///
/// Per GraphQL spec Section 3 - Type System
/// Defines the type system including scalars, objects, enums, etc.
import gleam/list
import gleam/option.{type Option, None}
import graphql/value

/// Resolver context - will contain request context, data loaders, etc.
pub type Context {
  Context(data: Option(value.Value))
}

/// Field resolver function type
pub type Resolver =
  fn(Context) -> Result(value.Value, String)

/// GraphQL Type
pub opaque type Type {
  ScalarType(name: String)
  ObjectType(name: String, description: String, fields: List(Field))
  EnumType(name: String, description: String, values: List(EnumValue))
  ListType(inner_type: Type)
  NonNullType(inner_type: Type)
}

/// GraphQL Field
pub opaque type Field {
  Field(
    name: String,
    field_type: Type,
    description: String,
    arguments: List(Argument),
    resolver: Resolver,
  )
}

/// GraphQL Argument
pub opaque type Argument {
  Argument(
    name: String,
    arg_type: Type,
    description: String,
    default_value: Option(value.Value),
  )
}

/// GraphQL Enum Value
pub opaque type EnumValue {
  EnumValue(name: String, description: String)
}

/// GraphQL Schema
pub opaque type Schema {
  Schema(query_type: Type, mutation_type: Option(Type))
}

// Built-in scalar types
pub fn string_type() -> Type {
  ScalarType("String")
}

pub fn int_type() -> Type {
  ScalarType("Int")
}

pub fn float_type() -> Type {
  ScalarType("Float")
}

pub fn boolean_type() -> Type {
  ScalarType("Boolean")
}

pub fn id_type() -> Type {
  ScalarType("ID")
}

// Type constructors
pub fn object_type(
  name: String,
  description: String,
  fields: List(Field),
) -> Type {
  ObjectType(name, description, fields)
}

pub fn enum_type(
  name: String,
  description: String,
  values: List(EnumValue),
) -> Type {
  EnumType(name, description, values)
}

pub fn list_type(inner_type: Type) -> Type {
  ListType(inner_type)
}

pub fn non_null(inner_type: Type) -> Type {
  NonNullType(inner_type)
}

// Field constructors
pub fn field(
  name: String,
  field_type: Type,
  description: String,
  resolver: Resolver,
) -> Field {
  Field(name, field_type, description, [], resolver)
}

pub fn field_with_args(
  name: String,
  field_type: Type,
  description: String,
  arguments: List(Argument),
  resolver: Resolver,
) -> Field {
  Field(name, field_type, description, arguments, resolver)
}

// Argument constructor
pub fn argument(
  name: String,
  arg_type: Type,
  description: String,
  default_value: Option(value.Value),
) -> Argument {
  Argument(name, arg_type, description, default_value)
}

// Enum value constructor
pub fn enum_value(name: String, description: String) -> EnumValue {
  EnumValue(name, description)
}

// Schema constructor
pub fn schema(query_type: Type, mutation_type: Option(Type)) -> Schema {
  Schema(query_type, mutation_type)
}

// Accessors
//
// Field/Argument/Schema are single-variant records, so direct record
// access is used instead of `case` destructuring. Type has multiple
// variants and still requires pattern matching.

/// Display name for a type; wrapper types render as "[Inner]" / "Inner!"
pub fn type_name(t: Type) -> String {
  case t {
    ScalarType(name) -> name
    ObjectType(name, _, _) -> name
    EnumType(name, _, _) -> name
    ListType(inner) -> "[" <> type_name(inner) <> "]"
    NonNullType(inner) -> type_name(inner) <> "!"
  }
}

pub fn field_name(f: Field) -> String {
  f.name
}

pub fn query_type(s: Schema) -> Type {
  s.query_type
}

/// Get the optional mutation root type (None when the schema has no
/// mutations). Previously the mutation type was stored but inaccessible.
pub fn mutation_type(s: Schema) -> Option(Type) {
  s.mutation_type
}

pub fn is_non_null(t: Type) -> Bool {
  case t {
    NonNullType(_) -> True
    _ -> False
  }
}

pub fn is_list(t: Type) -> Bool {
  case t {
    ListType(_) -> True
    _ -> False
  }
}

// Field resolution helpers

/// Invoke a field's resolver with the given context
pub fn resolve_field(field: Field, ctx: Context) -> Result(value.Value, String) {
  field.resolver(ctx)
}

/// Look up a field by name on an ObjectType; None for non-object types
pub fn get_field(t: Type, field_name: String) -> Option(Field) {
  case t {
    ObjectType(_, _, fields) ->
      list.find(fields, fn(f) { f.name == field_name })
      |> option.from_result
    _ -> None
  }
}

/// Get the type of a field
pub fn field_type(field: Field) -> Type {
  field.field_type
}

/// Get all fields from an ObjectType (empty list for other types)
pub fn get_fields(t: Type) -> List(Field) {
  case t {
    ObjectType(_, _, fields) -> fields
    _ -> []
  }
}

/// Get field description
pub fn field_description(field: Field) -> String {
  field.description
}

/// Get field arguments
pub fn field_arguments(field: Field) -> List(Argument) {
  field.arguments
}

/// Get argument name
pub fn argument_name(arg: Argument) -> String {
  arg.name
}

/// Get argument type
pub fn argument_type(arg: Argument) -> Type {
  arg.arg_type
}

/// Get argument description
pub fn argument_description(arg: Argument) -> String {
  arg.description
}

/// Check if type is a scalar
pub fn is_scalar(t: Type) -> Bool {
  case t {
    ScalarType(_) -> True
    _ -> False
  }
}

/// Check if type is an object
pub fn is_object(t: Type) -> Bool {
  case t {
    ObjectType(_, _, _) -> True
    _ -> False
  }
}

/// Check if type is an enum
pub fn is_enum(t: Type) -> Bool {
  case t {
    EnumType(_, _, _) -> True
    _ -> False
  }
}

/// Get the inner type from a wrapping type (List or NonNull)
pub fn inner_type(t: Type) -> option.Option(Type) {
  case t {
    ListType(inner) -> option.Some(inner)
    NonNullType(inner) -> option.Some(inner)
    _ -> option.None
  }
}

/// Get the kind of a type as a string for introspection
pub fn type_kind(t: Type) -> String {
  case t {
    ScalarType(_) -> "SCALAR"
    ObjectType(_, _, _) -> "OBJECT"
    EnumType(_, _, _) -> "ENUM"
    ListType(_) -> "LIST"
    NonNullType(_) -> "NON_NULL"
  }
}
+32
graphql/src/graphql/value.gleam
/// GraphQL Value types
///
/// Per GraphQL spec Section 2 - Language, values can be scalars, enums,
/// lists, or objects. This module defines the core Value type used throughout
/// the GraphQL implementation.
/// A GraphQL value that can be used in queries, responses, and variables
pub type Value {
  /// Represents null/absence of a value
  Null

  /// Integer value (32-bit signed integer per spec)
  Int(Int)

  /// Floating point value (IEEE 754 double precision per spec)
  Float(Float)

  /// UTF-8 string value
  String(String)

  /// Boolean true or false
  Boolean(Bool)

  /// Enum value represented as a string (e.g., "ACTIVE", "PENDING")
  Enum(String)

  /// Ordered list of values
  List(List(Value))

  /// Key-value pairs backing object/map values.
  /// Stored as a list of tuples for simplicity and to preserve key
  /// insertion order (useful for stable serialisation); lookups are
  /// linear, e.g. via list.key_find.
  Object(List(#(String, Value)))
}
+349
graphql/test/graphql/executor_test.gleam
··· 1 + /// Tests for GraphQL Executor 2 + /// 3 + /// Tests query execution combining parser + schema + resolvers 4 + import gleam/list 5 + import gleam/option.{None} 6 + import gleeunit/should 7 + import graphql/executor 8 + import graphql/schema 9 + import graphql/value 10 + 11 + // Helper to create a simple test schema 12 + fn test_schema() -> schema.Schema { 13 + let query_type = 14 + schema.object_type("Query", "Root query type", [ 15 + schema.field("hello", schema.string_type(), "Hello field", fn(_ctx) { 16 + Ok(value.String("world")) 17 + }), 18 + schema.field("number", schema.int_type(), "Number field", fn(_ctx) { 19 + Ok(value.Int(42)) 20 + }), 21 + schema.field_with_args( 22 + "greet", 23 + schema.string_type(), 24 + "Greet someone", 25 + [schema.argument("name", schema.string_type(), "Name to greet", None)], 26 + fn(_ctx) { Ok(value.String("Hello, Alice!")) }, 27 + ), 28 + ]) 29 + 30 + schema.schema(query_type, None) 31 + } 32 + 33 + // Nested object schema for testing 34 + fn nested_schema() -> schema.Schema { 35 + let user_type = 36 + schema.object_type("User", "A user", [ 37 + schema.field("id", schema.id_type(), "User ID", fn(_ctx) { 38 + Ok(value.String("123")) 39 + }), 40 + schema.field("name", schema.string_type(), "User name", fn(_ctx) { 41 + Ok(value.String("Alice")) 42 + }), 43 + ]) 44 + 45 + let query_type = 46 + schema.object_type("Query", "Root query type", [ 47 + schema.field("user", user_type, "Get user", fn(_ctx) { 48 + Ok( 49 + value.Object([ 50 + #("id", value.String("123")), 51 + #("name", value.String("Alice")), 52 + ]), 53 + ) 54 + }), 55 + ]) 56 + 57 + schema.schema(query_type, None) 58 + } 59 + 60 + pub fn execute_simple_query_test() { 61 + let schema = test_schema() 62 + let query = "{ hello }" 63 + 64 + let result = executor.execute(query, schema, schema.Context(None)) 65 + 66 + should.be_ok(result) 67 + |> fn(response) { 68 + case response { 69 + executor.Response( 70 + data: value.Object([#("hello", value.String("world"))]), 
71 + errors: [], 72 + ) -> True 73 + _ -> False 74 + } 75 + } 76 + |> should.be_true 77 + } 78 + 79 + pub fn execute_multiple_fields_test() { 80 + let schema = test_schema() 81 + let query = "{ hello number }" 82 + 83 + let result = executor.execute(query, schema, schema.Context(None)) 84 + 85 + should.be_ok(result) 86 + } 87 + 88 + pub fn execute_nested_query_test() { 89 + let schema = nested_schema() 90 + let query = "{ user { id name } }" 91 + 92 + let result = executor.execute(query, schema, schema.Context(None)) 93 + 94 + should.be_ok(result) 95 + } 96 + 97 + pub fn execute_field_with_arguments_test() { 98 + let schema = test_schema() 99 + let query = "{ greet(name: \"Alice\") }" 100 + 101 + let result = executor.execute(query, schema, schema.Context(None)) 102 + 103 + should.be_ok(result) 104 + } 105 + 106 + pub fn execute_invalid_query_returns_error_test() { 107 + let schema = test_schema() 108 + let query = "{ invalid }" 109 + 110 + let result = executor.execute(query, schema, schema.Context(None)) 111 + 112 + // Should return error since field doesn't exist 113 + case result { 114 + Ok(executor.Response(_, [_, ..])) -> should.be_true(True) 115 + Error(_) -> should.be_true(True) 116 + _ -> should.be_true(False) 117 + } 118 + } 119 + 120 + pub fn execute_parse_error_returns_error_test() { 121 + let schema = test_schema() 122 + let query = "{ invalid syntax" 123 + 124 + let result = executor.execute(query, schema, schema.Context(None)) 125 + 126 + should.be_error(result) 127 + } 128 + 129 + pub fn execute_typename_introspection_test() { 130 + let schema = test_schema() 131 + let query = "{ __typename }" 132 + 133 + let result = executor.execute(query, schema, schema.Context(None)) 134 + 135 + should.be_ok(result) 136 + |> fn(response) { 137 + case response { 138 + executor.Response( 139 + data: value.Object([#("__typename", value.String("Query"))]), 140 + errors: [], 141 + ) -> True 142 + _ -> False 143 + } 144 + } 145 + |> should.be_true 146 + } 147 + 148 + 
// __typename must coexist with regular fields in one selection set.
pub fn execute_typename_with_regular_fields_test() {
  let schema = test_schema()

  let result =
    executor.execute("{ __typename hello }", schema, schema.Context(None))

  let response = should.be_ok(result)
  case response {
    executor.Response(
      data: value.Object([
        #("__typename", value.String("Query")),
        #("hello", value.String("world")),
      ]),
      errors: [],
    ) -> True
    _ -> False
  }
  |> should.be_true
}

// __schema { queryType { name } } resolves to the root type's name.
pub fn execute_schema_introspection_query_type_test() {
  let schema = test_schema()

  let result =
    executor.execute(
      "{ __schema { queryType { name } } }",
      schema,
      schema.Context(None),
    )

  let response = should.be_ok(result)
  case response {
    executor.Response(
      data: value.Object([
        #(
          "__schema",
          value.Object([
            #("queryType", value.Object([#("name", value.String("Query"))])),
          ]),
        ),
      ]),
      errors: [],
    ) -> True
    _ -> False
  }
  |> should.be_true
}

// Fragment execution tests
pub fn execute_simple_fragment_spread_test() {
  let schema = nested_schema()
  let query =
    "
    fragment UserFields on User {
      id
      name
    }

    { user { ...UserFields } }
    "

  let result = executor.execute(query, schema, schema.Context(None))

  // The spread should be expanded into the user selection set.
  let response = should.be_ok(result)
  case response {
    executor.Response(data: value.Object(top), errors: []) ->
      case list.key_find(top, "user") {
        Ok(value.Object(user_fields)) -> {
          let id_ok = case list.key_find(user_fields, "id") {
            Ok(value.String("123")) -> True
            _ -> False
          }
          let name_ok = case list.key_find(user_fields, "name") {
            Ok(value.String("Alice")) -> True
            _ -> False
          }
          id_ok && name_ok
        }
        _ -> False
      }
    _ -> False
  }
  |> should.be_true
}

// List fields: a nested selection must filter each element's fields.
pub fn execute_list_with_nested_selections_test() {
  // Schema: Query.users -> [User], where User exposes id / name / email,
  // each resolver reading the matching key off the parent object.
  let user_type =
    schema.object_type("User", "A user", [
      schema.field("id", schema.id_type(), "User ID", fn(ctx) {
        case ctx.data {
          option.Some(value.Object(parent)) ->
            case list.key_find(parent, "id") {
              Ok(found) -> Ok(found)
              Error(_) -> Ok(value.Null)
            }
          _ -> Ok(value.Null)
        }
      }),
      schema.field("name", schema.string_type(), "User name", fn(ctx) {
        case ctx.data {
          option.Some(value.Object(parent)) ->
            case list.key_find(parent, "name") {
              Ok(found) -> Ok(found)
              Error(_) -> Ok(value.Null)
            }
          _ -> Ok(value.Null)
        }
      }),
      schema.field("email", schema.string_type(), "User email", fn(ctx) {
        case ctx.data {
          option.Some(value.Object(parent)) ->
            case list.key_find(parent, "email") {
              Ok(found) -> Ok(found)
              Error(_) -> Ok(value.Null)
            }
          _ -> Ok(value.Null)
        }
      }),
    ])

  let query_type =
    schema.object_type("Query", "Root query type", [
      schema.field(
        "users",
        schema.list_type(user_type),
        "Get all users",
        fn(_ctx) {
          // Two fixed users, both carrying an email the query won't ask for.
          Ok(
            value.List([
              value.Object([
                #("id", value.String("1")),
                #("name", value.String("Alice")),
                #("email", value.String("alice@example.com")),
              ]),
              value.Object([
                #("id", value.String("2")),
                #("name", value.String("Bob")),
                #("email", value.String("bob@example.com")),
              ]),
            ]),
          )
        },
      ),
    ])

  let schema = schema.schema(query_type, None)

  // Only id and name are requested; email must NOT appear in the result.
  let result =
    executor.execute("{ users { id name } }", schema, schema.Context(None))

  let response = should.be_ok(result)
  case response {
    executor.Response(data: value.Object(top), errors: []) ->
      case list.key_find(top, "users") {
        Ok(value.List(users)) ->
          list.length(users) == 2
          && list.all(users, fn(user) {
            case user {
              value.Object(user_fields) -> {
                let present = fn(key) {
                  case list.key_find(user_fields, key) {
                    Ok(_) -> True
                    _ -> False
                  }
                }
                // Exactly the two requested fields, and never email.
                list.length(user_fields) == 2
                && present("id")
                && present("name")
                && !present("email")
              }
              _ -> False
            }
          })
        _ -> False
      }
    _ -> False
  }
  |> should.be_true
}
+311
graphql/test/graphql/introspection_test.gleam
/// Tests for GraphQL Introspection
///
/// Exercises `__schema` queries: multiple top-level fields, the types
/// list, field ordering, null parents with deep nesting, and inline
/// fragments (as emitted by GraphiQL).
import gleam/list
import gleam/option.{None}
import gleeunit/should
import graphql/executor
import graphql/schema
import graphql/value

// A minimal schema: a Query type with one String and one Int field.
fn test_schema() -> schema.Schema {
  let query_type =
    schema.object_type("Query", "Root query type", [
      schema.field("hello", schema.string_type(), "Hello field", fn(_ctx) {
        Ok(value.String("world"))
      }),
      schema.field("number", schema.int_type(), "Number field", fn(_ctx) {
        Ok(value.Int(42))
      }),
    ])

  schema.schema(query_type, None)
}

/// Test: Multiple scalar fields on __schema
/// All requested fields come back; mutationType and subscriptionType
/// resolve to null on a query-only schema.
pub fn schema_multiple_fields_test() {
  let schema = test_schema()
  let query =
    "{ __schema { queryType { name } mutationType { name } subscriptionType { name } } }"

  let result = executor.execute(query, schema, schema.Context(None))

  let response = should.be_ok(result)
  case response {
    executor.Response(data: value.Object(top), errors: []) ->
      case list.key_find(top, "__schema") {
        Ok(value.Object(schema_fields)) -> {
          let query_type_ok = case list.key_find(schema_fields, "queryType") {
            Ok(value.Object(_)) -> True
            _ -> False
          }
          // No mutation type is defined, so this must be null.
          let mutation_null = case
            list.key_find(schema_fields, "mutationType")
          {
            Ok(value.Null) -> True
            _ -> False
          }
          // Likewise for subscriptions.
          let subscription_null = case
            list.key_find(schema_fields, "subscriptionType")
          {
            Ok(value.Null) -> True
            _ -> False
          }
          query_type_ok && mutation_null && subscription_null
        }
        _ -> False
      }
    _ -> False
  }
  |> should.be_true
}

/// Test: types field with other fields
/// The types array is returned alongside other __schema selections.
pub fn schema_types_with_other_fields_test() {
  let schema = test_schema()
  let query = "{ __schema { queryType { name } types { name } } }"

  let result = executor.execute(query, schema, schema.Context(None))

  let response = should.be_ok(result)
  case response {
    executor.Response(data: value.Object(top), errors: []) ->
      case list.key_find(top, "__schema") {
        Ok(value.Object(schema_fields)) -> {
          let query_type_ok = case list.key_find(schema_fields, "queryType") {
            Ok(value.Object(qt)) ->
              case list.key_find(qt, "name") {
                Ok(value.String("Query")) -> True
                _ -> False
              }
            _ -> False
          }
          let types_ok = case list.key_find(schema_fields, "types") {
            // Query itself plus the five built-in scalars.
            Ok(value.List(types)) -> list.length(types) == 6
            _ -> False
          }
          query_type_ok && types_ok
        }
        _ -> False
      }
    _ -> False
  }
  |> should.be_true
}

/// Test: All __schema top-level fields
/// Selecting every top-level __schema field returns all five of them.
pub fn schema_all_fields_test() {
  let schema = test_schema()
  let query =
    "{ __schema { queryType { name } mutationType { name } subscriptionType { name } types { name } directives { name } } }"

  let result = executor.execute(query, schema, schema.Context(None))

  let response = should.be_ok(result)
  case response {
    executor.Response(data: value.Object(top), errors: []) ->
      case list.key_find(top, "__schema") {
        Ok(value.Object(schema_fields)) -> list.length(schema_fields) == 5
        _ -> False
      }
    _ -> False
  }
  |> should.be_true
}

/// Test: Field order doesn't matter
/// Swapping the selection order must not change the result shape.
pub fn schema_field_order_test() {
  let schema = test_schema()
  let query1 = "{ __schema { types { name } queryType { name } } }"
  let query2 = "{ __schema { queryType { name } types { name } } }"

  let result1 = executor.execute(query1, schema, schema.Context(None))
  let result2 = executor.execute(query2, schema, schema.Context(None))

  // Both orderings must succeed.
  should.be_ok(result1)
  should.be_ok(result2)

  // And both must contain the same two __schema fields.
  case result1, result2 {
    Ok(executor.Response(data: value.Object(fields1), errors: [])),
      Ok(executor.Response(data: value.Object(fields2), errors: []))
    -> {
      case
        list.key_find(fields1, "__schema"),
        list.key_find(fields2, "__schema")
      {
        Ok(value.Object(inner1)), Ok(value.Object(inner2)) ->
          list.length(inner1) == 2 && list.length(inner2) == 2
        _, _ -> False
      }
    }
    _, _ -> False
  }
  |> should.be_true
}

/// Test: Nested introspection on types
/// Each entry of the types list resolves name, kind, and fields.
pub fn schema_types_nested_fields_test() {
  let schema = test_schema()
  let query = "{ __schema { types { name kind fields { name } } } }"

  let result = executor.execute(query, schema, schema.Context(None))

  let response = should.be_ok(result)
  case response {
    executor.Response(data: value.Object(top), errors: []) ->
      case list.key_find(top, "__schema") {
        Ok(value.Object(schema_fields)) ->
          case list.key_find(schema_fields, "types") {
            Ok(value.List(types)) ->
              list.all(types, fn(entry) {
                case entry {
                  value.Object(type_fields) -> {
                    // `fields` may be null for scalars, but the key must
                    // still be present in the response.
                    let present = fn(key) {
                      case list.key_find(type_fields, key) {
                        Ok(_) -> True
                        _ -> False
                      }
                    }
                    present("name") && present("kind") && present("fields")
                  }
                  _ -> False
                }
              })
            _ -> False
          }
        _ -> False
      }
    _ -> False
  }
  |> should.be_true
}

/// Test: Empty nested selections on null fields
/// Selecting nested fields under a null parent yields null, not an error.
pub fn schema_null_field_with_deep_nesting_test() {
  let schema = test_schema()
  let query = "{ __schema { mutationType { name fields { name } } } }"

  let result = executor.execute(query, schema, schema.Context(None))

  let response = should.be_ok(result)
  case response {
    executor.Response(data: value.Object(top), errors: []) ->
      case list.key_find(top, "__schema") {
        Ok(value.Object(schema_fields)) ->
          case list.key_find(schema_fields, "mutationType") {
            Ok(value.Null) -> True
            _ -> False
          }
        _ -> False
      }
    _ -> False
  }
  |> should.be_true
}

/// Test: Inline fragments in introspection
/// `... on __Type` spreads must resolve kind and name for every type.
pub fn schema_inline_fragment_test() {
  let schema = test_schema()
  let query = "{ __schema { types { ... on __Type { kind name } } } }"

  let result = executor.execute(query, schema, schema.Context(None))

  let response = should.be_ok(result)
  case response {
    executor.Response(data: value.Object(top), errors: []) ->
      case list.key_find(top, "__schema") {
        Ok(value.Object(schema_fields)) ->
          case list.key_find(schema_fields, "types") {
            Ok(value.List(types)) ->
              list.length(types) == 6
              && list.all(types, fn(entry) {
                case entry {
                  value.Object(type_fields) -> {
                    let string_at = fn(key) {
                      case list.key_find(type_fields, key) {
                        Ok(value.String(_)) -> True
                        _ -> False
                      }
                    }
                    string_at("kind") && string_at("name")
                  }
                  _ -> False
                }
              })
            _ -> False
          }
        _ -> False
      }
    _ -> False
  }
  |> should.be_true
}
+194
graphql/test/graphql/lexer_test.gleam
/// Tests for GraphQL Lexer (tokenization)
///
/// GraphQL spec Section 2 - Language
/// Token types: Punctuator, Name, IntValue, FloatValue, StringValue
/// Ignored: Whitespace, LineTerminator, Comment, Comma
import gleeunit/should
import graphql/lexer.{
  BraceClose, BraceOpen, Colon, Float, Int, Name, ParenClose, ParenOpen, String,
}

// Assert that `input` lexes to exactly the given token list.
fn expect_tokens(input: String, expected) {
  lexer.tokenize(input)
  |> should.equal(Ok(expected))
}

// Punctuator tests
pub fn tokenize_brace_open_test() {
  expect_tokens("{", [BraceOpen])
}

pub fn tokenize_brace_close_test() {
  expect_tokens("}", [BraceClose])
}

pub fn tokenize_paren_open_test() {
  expect_tokens("(", [ParenOpen])
}

pub fn tokenize_paren_close_test() {
  expect_tokens(")", [ParenClose])
}

pub fn tokenize_colon_test() {
  expect_tokens(":", [Colon])
}

// Name tests (identifiers)
pub fn tokenize_simple_name_test() {
  expect_tokens("query", [Name("query")])
}

pub fn tokenize_name_with_underscore_test() {
  expect_tokens("user_name", [Name("user_name")])
}

pub fn tokenize_name_with_numbers_test() {
  expect_tokens("field123", [Name("field123")])
}

// Int value tests
pub fn tokenize_positive_int_test() {
  expect_tokens("42", [Int("42")])
}

pub fn tokenize_negative_int_test() {
  expect_tokens("-42", [Int("-42")])
}

pub fn tokenize_zero_test() {
  expect_tokens("0", [Int("0")])
}

// Float value tests
pub fn tokenize_simple_float_test() {
  expect_tokens("3.14", [Float("3.14")])
}

pub fn tokenize_negative_float_test() {
  expect_tokens("-3.14", [Float("-3.14")])
}

pub fn tokenize_float_with_exponent_test() {
  expect_tokens("1.5e10", [Float("1.5e10")])
}

pub fn tokenize_float_with_negative_exponent_test() {
  expect_tokens("1.5e-10", [Float("1.5e-10")])
}

// String value tests
pub fn tokenize_empty_string_test() {
  expect_tokens("\"\"", [String("")])
}

pub fn tokenize_simple_string_test() {
  expect_tokens("\"hello\"", [String("hello")])
}

pub fn tokenize_string_with_spaces_test() {
  expect_tokens("\"hello world\"", [String("hello world")])
}

pub fn tokenize_string_with_escape_test() {
  // The escaped \n in the source becomes a real newline in the token.
  expect_tokens("\"hello\\nworld\"", [String("hello\nworld")])
}

// Whitespace handling (filtered out by the lexer)
pub fn tokenize_with_spaces_test() {
  expect_tokens("query user", [Name("query"), Name("user")])
}

pub fn tokenize_with_tabs_test() {
  expect_tokens("query\tuser", [Name("query"), Name("user")])
}

pub fn tokenize_with_newlines_test() {
  expect_tokens("query\nuser", [Name("query"), Name("user")])
}

// Comment tests (filtered out by the lexer)
pub fn tokenize_with_comment_test() {
  expect_tokens("query # this is a comment\nuser", [
    Name("query"),
    Name("user"),
  ])
}

// Complex query tests
pub fn tokenize_simple_query_test() {
  expect_tokens("{ user }", [BraceOpen, Name("user"), BraceClose])
}

pub fn tokenize_query_with_field_test() {
  expect_tokens("{ user { name } }", [
    BraceOpen,
    Name("user"),
    BraceOpen,
    Name("name"),
    BraceClose,
    BraceClose,
  ])
}

pub fn tokenize_query_with_argument_test() {
  expect_tokens("{ user(id: 42) }", [
    BraceOpen,
    Name("user"),
    ParenOpen,
    Name("id"),
    Colon,
    Int("42"),
    ParenClose,
    BraceClose,
  ])
}

pub fn tokenize_query_with_string_argument_test() {
  expect_tokens("{ user(name: \"Alice\") }", [
    BraceOpen,
    Name("user"),
    ParenOpen,
    Name("name"),
    Colon,
    String("Alice"),
    ParenClose,
    BraceClose,
  ])
}

// Error cases - use a truly invalid character like backslash
pub fn tokenize_invalid_character_test() {
  lexer.tokenize("query \\invalid")
  |> should.be_error()
}

pub fn tokenize_unclosed_string_test() {
  lexer.tokenize("\"unclosed")
  |> should.be_error()
}
+247
graphql/test/graphql/parser_test.gleam
/// Tests for GraphQL Parser (AST building)
///
/// GraphQL spec Section 2 - Language
/// Parse tokens into Abstract Syntax Tree
import gleam/option.{None}
import gleeunit/should
import graphql/parser

// Simple query tests
pub fn parse_empty_query_test() {
  parser.parse("{ }")
  |> should.be_ok
}

pub fn parse_single_field_test() {
  let doc = should.be_ok(parser.parse("{ user }"))
  case doc {
    parser.Document([
      parser.Query(parser.SelectionSet([
        parser.Field(name: "user", alias: None, arguments: [], selections: []),
      ])),
    ]) -> True
    _ -> False
  }
  |> should.be_true
}

pub fn parse_nested_fields_test() {
  let doc = should.be_ok(parser.parse("{ user { name } }"))
  case doc {
    parser.Document([
      parser.Query(parser.SelectionSet([
        parser.Field(
          name: "user",
          alias: None,
          arguments: [],
          selections: [parser.Field("name", None, [], [])],
        ),
      ])),
    ]) -> True
    _ -> False
  }
  |> should.be_true
}

pub fn parse_multiple_fields_test() {
  let doc = should.be_ok(parser.parse("{ user posts }"))
  case doc {
    parser.Document([
      parser.Query(parser.SelectionSet([
        parser.Field(name: "user", alias: None, arguments: [], selections: []),
        parser.Field(name: "posts", alias: None, arguments: [], selections: []),
      ])),
    ]) -> True
    _ -> False
  }
  |> should.be_true
}

// Arguments tests
pub fn parse_field_with_int_argument_test() {
  let doc = should.be_ok(parser.parse("{ user(id: 42) }"))
  case doc {
    parser.Document([
      parser.Query(parser.SelectionSet([
        parser.Field(
          name: "user",
          alias: None,
          arguments: [parser.Argument("id", parser.IntValue("42"))],
          selections: [],
        ),
      ])),
    ]) -> True
    _ -> False
  }
  |> should.be_true
}

pub fn parse_field_with_string_argument_test() {
  let doc = should.be_ok(parser.parse("{ user(name: \"Alice\") }"))
  case doc {
    parser.Document([
      parser.Query(parser.SelectionSet([
        parser.Field(
          name: "user",
          alias: None,
          arguments: [parser.Argument("name", parser.StringValue("Alice"))],
          selections: [],
        ),
      ])),
    ]) -> True
    _ -> False
  }
  |> should.be_true
}

pub fn parse_field_with_multiple_arguments_test() {
  let doc = should.be_ok(parser.parse("{ user(id: 42, name: \"Alice\") }"))
  case doc {
    parser.Document([
      parser.Query(parser.SelectionSet([
        parser.Field(
          name: "user",
          alias: None,
          arguments: [
            parser.Argument("id", parser.IntValue("42")),
            parser.Argument("name", parser.StringValue("Alice")),
          ],
          selections: [],
        ),
      ])),
    ]) -> True
    _ -> False
  }
  |> should.be_true
}

// Named operation tests
pub fn parse_named_query_test() {
  let doc = should.be_ok(parser.parse("query GetUser { user }"))
  case doc {
    parser.Document([
      parser.NamedQuery(
        name: "GetUser",
        variables: [],
        selections: parser.SelectionSet([parser.Field("user", None, [], [])]),
      ),
    ]) -> True
    _ -> False
  }
  |> should.be_true
}

// Complex query test
pub fn parse_complex_query_test() {
  "
  query GetUserPosts {
    user(id: 1) {
      name
      posts {
        title
        content
      }
    }
  }
  "
  |> parser.parse
  |> should.be_ok
}

// Error cases
pub fn parse_invalid_syntax_test() {
  parser.parse("{ user")
  |> should.be_error
}

pub fn parse_empty_string_test() {
  parser.parse("")
  |> should.be_error
}

pub fn parse_invalid_field_name_test() {
  parser.parse("{ 123 }")
  |> should.be_error
}

// Fragment tests
pub fn parse_fragment_definition_test() {
  let doc =
    should.be_ok(parser.parse(
      "
  fragment UserFields on User {
    id
    name
  }
  { user { ...UserFields } }
  ",
    ))
  case doc {
    parser.Document([
      parser.FragmentDefinition(
        name: "UserFields",
        type_condition: "User",
        selections: parser.SelectionSet([
          parser.Field("id", None, [], []),
          parser.Field("name", None, [], []),
        ]),
      ),
      parser.Query(parser.SelectionSet([
        parser.Field(
          name: "user",
          alias: None,
          arguments: [],
          selections: [parser.FragmentSpread("UserFields")],
        ),
      ])),
    ]) -> True
    _ -> False
  }
  |> should.be_true
}

pub fn parse_inline_fragment_test() {
  "
  { user { ... on User { name } } }
  "
  |> parser.parse
  |> should.be_ok
}
+84
graphql/test/graphql/schema_test.gleam
/// Tests for GraphQL Schema (Type System)
///
/// GraphQL spec Section 3 - Type System
/// Defines types, fields, and schema structure
import gleam/option.{None}
import gleeunit/should
import graphql/schema
import graphql/value

// Built-in scalar types expose their spec names.
pub fn create_scalar_type_test() {
  schema.string_type()
  |> schema.type_name
  |> should.equal("String")
}

pub fn create_object_type_test() {
  schema.object_type("User", "A user in the system", [
    schema.field("id", schema.id_type(), "User ID", fn(_ctx) {
      Ok(value.String("123"))
    }),
    schema.field("name", schema.string_type(), "User name", fn(_ctx) {
      Ok(value.String("Alice"))
    }),
  ])
  |> schema.type_name
  |> should.equal("User")
}

pub fn create_non_null_type_test() {
  schema.string_type()
  |> schema.non_null
  |> schema.is_non_null
  |> should.be_true
}

pub fn create_list_type_test() {
  schema.string_type()
  |> schema.list_type
  |> schema.is_list
  |> should.be_true
}

pub fn create_schema_test() {
  let query_type =
    schema.object_type("Query", "Root query type", [
      schema.field("hello", schema.string_type(), "Hello field", fn(_ctx) {
        Ok(value.String("world"))
      }),
    ])

  // The schema must hand back the exact root query type it was built with.
  schema.schema(query_type, None)
  |> schema.query_type
  |> should.equal(query_type)
}

pub fn field_with_arguments_test() {
  schema.field_with_args(
    "user",
    schema.string_type(),
    "Get user by ID",
    [schema.argument("id", schema.id_type(), "User ID", None)],
    fn(_ctx) { Ok(value.String("Alice")) },
  )
  |> schema.field_name
  |> should.equal("user")
}

pub fn enum_type_test() {
  schema.enum_type("Role", "User role", [
    schema.enum_value("ADMIN", "Administrator"),
    schema.enum_value("USER", "Regular user"),
  ])
  |> schema.type_name
  |> should.equal("Role")
}

// Smoke test: all five built-in scalar constructors exist.
pub fn scalar_types_exist_test() {
  let _string = schema.string_type()
  let _int = schema.int_type()
  let _float = schema.float_type()
  let _boolean = schema.boolean_type()
  let _id = schema.id_type()

  should.be_true(True)
}
+87
graphql/test/graphql/value_test.gleam
/// Tests for GraphQL Value types
///
/// GraphQL spec Section 2 - Language
/// Values can be: Null, Int, Float, String, Boolean, Enum, List, Object
import gleeunit/should
import graphql/value.{Boolean, Enum, Float, Int, List, Null, Object, String}

pub fn null_value_test() {
  Null
  |> should.equal(Null)
}

pub fn int_value_test() {
  Int(42)
  |> should.equal(Int(42))
}

pub fn float_value_test() {
  Float(3.14)
  |> should.equal(Float(3.14))
}

pub fn string_value_test() {
  String("hello")
  |> should.equal(String("hello"))
}

pub fn boolean_true_value_test() {
  Boolean(True)
  |> should.equal(Boolean(True))
}

pub fn boolean_false_value_test() {
  Boolean(False)
  |> should.equal(Boolean(False))
}

pub fn enum_value_test() {
  Enum("ACTIVE")
  |> should.equal(Enum("ACTIVE"))
}

pub fn empty_list_value_test() {
  List([])
  |> should.equal(List([]))
}

pub fn list_of_ints_test() {
  List([Int(1), Int(2), Int(3)])
  |> should.equal(List([Int(1), Int(2), Int(3)]))
}

pub fn nested_list_test() {
  List([List([Int(1), Int(2)]), List([Int(3), Int(4)])])
  |> should.equal(List([List([Int(1), Int(2)]), List([Int(3), Int(4)])]))
}

pub fn empty_object_test() {
  Object([])
  |> should.equal(Object([]))
}

pub fn simple_object_test() {
  Object([#("name", String("Alice")), #("age", Int(30))])
  |> should.equal(Object([#("name", String("Alice")), #("age", Int(30))]))
}

pub fn nested_object_test() {
  // Objects nest arbitrarily and compare structurally.
  let nested =
    Object([
      #("user", Object([#("name", String("Bob")), #("active", Boolean(True))])),
      #("count", Int(5)),
    ])

  should.equal(nested, nested)
}

pub fn mixed_types_list_test() {
  List([String("hello"), Int(42), Boolean(True), Null])
  |> should.equal(List([String("hello"), Int(42), Boolean(True), Null]))
}
+13
graphql/test/graphql_test.gleam
··· 1 + import gleeunit 2 + 3 + pub fn main() -> Nil { 4 + gleeunit.main() 5 + } 6 + 7 + // gleeunit test functions end in `_test` 8 + pub fn hello_world_test() { 9 + let name = "Joe" 10 + let greeting = "Hello, " <> name <> "!" 11 + 12 + assert greeting == "Hello, Joe!" 13 + }
+4
jetstream/.gitignore
··· 1 + *.beam 2 + *.ez 3 + /build 4 + erl_crash.dump
+24
jetstream/README.md
# jetstream

[![Package Version](https://img.shields.io/hexpm/v/jetstream)](https://hex.pm/packages/jetstream)
[![Hex Docs](https://img.shields.io/badge/hex-docs-ffaff3)](https://hexdocs.pm/jetstream/)

A Gleam consumer for the AT Protocol Jetstream WebSocket firehose.

```sh
gleam add jetstream@1
```
```gleam
import gleam/io
import jetstream

pub fn main() -> Nil {
  // Stream post records from the default US-East endpoint and print
  // each raw JSON event as it arrives.
  let config =
    jetstream.JetstreamConfig(
      ..jetstream.default_config(),
      wanted_collections: ["app.bsky.feed.post"],
    )
  jetstream.start_consumer(config, io.println)
}
```

Further documentation can be found at <https://hexdocs.pm/jetstream>.

## Development

```sh
gleam run # Run the project
gleam test # Run the tests
```
+23
jetstream/gleam.toml
··· 1 + name = "jetstream" 2 + version = "1.0.0" 3 + 4 + # Fill out these fields if you intend to generate HTML documentation or publish 5 + # your project to the Hex package manager. 6 + # 7 + # description = "" 8 + # licences = ["Apache-2.0"] 9 + # repository = { type = "github", user = "", repo = "" } 10 + # links = [{ title = "Website", href = "" }] 11 + # 12 + # For a full reference of all the available options, you can have a look at 13 + # https://gleam.run/writing-gleam/gleam-toml/. 14 + 15 + [dependencies] 16 + gleam_stdlib = ">= 0.44.0 and < 2.0.0" 17 + gleam_erlang = ">= 1.0.0 and < 2.0.0" 18 + gleam_http = ">= 4.0.0 and < 5.0.0" 19 + gleam_json = ">= 3.0.2 and < 4.0.0" 20 + gun = ">= 2.2.0 and < 3.0.0" 21 + 22 + [dev-dependencies] 23 + gleeunit = ">= 1.0.0 and < 2.0.0"
+20
jetstream/manifest.toml
··· 1 + # This file was generated by Gleam 2 + # You typically do not need to edit this file 3 + 4 + packages = [ 5 + { name = "cowlib", version = "2.16.0", build_tools = ["make", "rebar3"], requirements = [], otp_app = "cowlib", source = "hex", outer_checksum = "7F478D80D66B747344F0EA7708C187645CFCC08B11AA424632F78E25BF05DB51" }, 6 + { name = "gleam_erlang", version = "1.3.0", build_tools = ["gleam"], requirements = ["gleam_stdlib"], otp_app = "gleam_erlang", source = "hex", outer_checksum = "1124AD3AA21143E5AF0FC5CF3D9529F6DB8CA03E43A55711B60B6B7B3874375C" }, 7 + { name = "gleam_http", version = "4.3.0", build_tools = ["gleam"], requirements = ["gleam_stdlib"], otp_app = "gleam_http", source = "hex", outer_checksum = "82EA6A717C842456188C190AFB372665EA56CE13D8559BF3B1DD9E40F619EE0C" }, 8 + { name = "gleam_json", version = "3.0.2", build_tools = ["gleam"], requirements = ["gleam_stdlib"], otp_app = "gleam_json", source = "hex", outer_checksum = "874FA3C3BB6E22DD2BB111966BD40B3759E9094E05257899A7C08F5DE77EC049" }, 9 + { name = "gleam_stdlib", version = "0.65.0", build_tools = ["gleam"], requirements = [], otp_app = "gleam_stdlib", source = "hex", outer_checksum = "7C69C71D8C493AE11A5184828A77110EB05A7786EBF8B25B36A72F879C3EE107" }, 10 + { name = "gleeunit", version = "1.6.1", build_tools = ["gleam"], requirements = ["gleam_stdlib"], otp_app = "gleeunit", source = "hex", outer_checksum = "FDC68A8C492B1E9B429249062CD9BAC9B5538C6FBF584817205D0998C42E1DAC" }, 11 + { name = "gun", version = "2.2.0", build_tools = ["make", "rebar3"], requirements = ["cowlib"], otp_app = "gun", source = "hex", outer_checksum = "76022700C64287FEB4DF93A1795CFF6741B83FB37415C40C34C38D2A4645261A" }, 12 + ] 13 + 14 + [requirements] 15 + gleam_erlang = { version = ">= 1.0.0 and < 2.0.0" } 16 + gleam_http = { version = ">= 4.0.0 and < 5.0.0" } 17 + gleam_json = { version = ">= 3.0.2 and < 4.0.0" } 18 + gleam_stdlib = { version = ">= 0.44.0 and < 2.0.0" } 19 + gleeunit = { version = ">= 1.0.0 and 
< 2.0.0" } 20 + gun = { version = ">= 2.2.0 and < 3.0.0" }
+233
jetstream/src/jetstream.gleam
//// Jetstream consumer: connects to a Bluesky Jetstream WebSocket feed and
//// decodes the JSON events it emits into typed Gleam values.

import gleam/dynamic.{type Dynamic}
import gleam/dynamic/decode
import gleam/erlang/process.{type Pid}
import gleam/io
import gleam/json
import gleam/list
import gleam/option.{type Option}
import gleam/string

/// Jetstream event types
pub type JetstreamEvent {
  /// A repository commit: record create, update, or delete.
  CommitEvent(did: String, time_us: Int, commit: CommitData)
  /// An identity change (e.g. a handle update).
  IdentityEvent(did: String, time_us: Int, identity: IdentityData)
  /// An account status change (activation / deactivation).
  AccountEvent(did: String, time_us: Int, account: AccountData)
  /// Raw JSON kept verbatim when none of the decoders matched.
  UnknownEvent(raw: String)
}

/// Payload of a commit event.
pub type CommitData {
  CommitData(
    rev: String,
    operation: String,
    collection: String,
    rkey: String,
    // None for delete operations; Some for create/update.
    record: Option(Dynamic),
    cid: Option(String),
  )
}

/// Payload of an identity event.
pub type IdentityData {
  IdentityData(did: String, handle: String, seq: Int, time: String)
}

/// Payload of an account event.
pub type AccountData {
  AccountData(active: Bool, did: String, seq: Int, time: String)
}

/// Configuration for Jetstream consumer
pub type JetstreamConfig {
  JetstreamConfig(
    endpoint: String,
    // Empty lists mean "no filter": the server streams everything.
    wanted_collections: List(String),
    wanted_dids: List(String),
  )
}

/// Create a default configuration for US East endpoint
pub fn default_config() -> JetstreamConfig {
  JetstreamConfig(
    endpoint: "wss://jetstream2.us-east.bsky.network/subscribe",
    wanted_collections: [],
    wanted_dids: [],
  )
}

/// Build the WebSocket URL with query parameters
///
/// Filters are appended as repeated `wantedCollections=` / `wantedDids=`
/// query parameters; with no filters the bare endpoint is returned.
pub fn build_url(config: JetstreamConfig) -> String {
  let base = config.endpoint
  let mut_params = []

  // Add wanted collections (each as a separate query parameter)
  let mut_params = case config.wanted_collections {
    [] -> mut_params
    collections -> {
      let collection_params =
        list.map(collections, fn(col) { "wantedCollections=" <> col })
      list.append(collection_params, mut_params)
    }
  }

  // Add wanted DIDs (each as a separate query parameter)
  let mut_params = case config.wanted_dids {
    [] -> mut_params
    dids -> {
      let did_params = list.map(dids, fn(did) { "wantedDids=" <> did })
      list.append(did_params, mut_params)
    }
  }

  case mut_params {
    [] -> base
    // Groups were prepended, so the reverse puts collections ahead of DIDs.
    // Note the order *within* each group ends up reversed, which is
    // harmless for repeated query parameters.
    params -> base <> "?" <> string.join(list.reverse(params), "&")
  }
}

/// Connect to Jetstream WebSocket using Erlang gun library
@external(erlang, "jetstream_ws_ffi", "connect")
pub fn connect(url: String, handler_pid: Pid) -> Result(Pid, Dynamic)

/// Start consuming the Jetstream feed
///
/// Blocks the calling process: on a successful connection it enters
/// `receive_loop` and never returns; on connection failure it logs the
/// error and returns Nil.
pub fn start_consumer(
  config: JetstreamConfig,
  on_event: fn(String) -> Nil,
) -> Nil {
  let url = build_url(config)
  io.println("🔗 Jetstream URL: " <> url)
  // The current process is registered as the receiver of WebSocket
  // messages (see jetstream_ffi.receive_ws_message).
  let self = process.self()
  let result = connect(url, self)

  case result {
    Ok(_conn_pid) -> {
      receive_loop(on_event)
    }
    Error(err) -> {
      io.println("Failed to connect to Jetstream")
      io.println_error(string.inspect(err))
    }
  }
}

/// Receive loop for WebSocket messages
///
/// Tail-recursive and infinite: each raw JSON text frame is handed to
/// `on_event`; receive errors/timeouts are swallowed and the loop
/// continues.
fn receive_loop(on_event: fn(String) -> Nil) -> Nil {
  // Call Erlang to receive one message
  case receive_ws_message() {
    Ok(text) -> {
      on_event(text)
      receive_loop(on_event)
    }
    Error(_) -> {
      // Timeout or error, continue loop
      receive_loop(on_event)
    }
  }
}

/// Receive a WebSocket message from the message queue
@external(erlang, "jetstream_ffi", "receive_ws_message")
fn receive_ws_message() -> Result(String, Nil)

/// Parse a JSON event string into a JetstreamEvent
///
/// Tries the commit, identity, then account decoders in that order; if
/// none match, the raw JSON is preserved in UnknownEvent, so this function
/// never fails.
pub fn parse_event(json_string: String) -> JetstreamEvent {
  // Try to parse as commit event first
  case json.parse(json_string, commit_event_decoder()) {
    Ok(event) -> event
    Error(_) -> {
      // Try identity event
      case json.parse(json_string, identity_event_decoder()) {
        Ok(event) -> event
        Error(_) -> {
          // Try account event
          case json.parse(json_string, account_event_decoder()) {
            Ok(event) -> event
            Error(_) -> UnknownEvent(json_string)
          }
        }
      }
    }
  }
}

/// Decoder for commit events
fn commit_event_decoder() {
  use did <- decode.field("did", decode.string)
  use time_us <- decode.field("time_us", decode.int)
  use commit <- decode.field("commit", commit_data_decoder())
  decode.success(CommitEvent(did: did, time_us: time_us, commit: commit))
}

/// Decoder for commit data - handles both create/update (with record) and delete (without)
fn commit_data_decoder() {
  // Try decoder with record and cid fields first (for create/update)
  // If that fails, try without (for delete)
  decode.one_of(commit_with_record_decoder(), or: [
    commit_without_record_decoder(),
  ])
}

/// Decoder for commit with record (create/update operations)
fn commit_with_record_decoder() {
  use rev <- decode.field("rev", decode.string)
  use operation <- decode.field("operation", decode.string)
  use collection <- decode.field("collection", decode.string)
  use rkey <- decode.field("rkey", decode.string)
  // The record body is schema-dependent, so it stays Dynamic here.
  use record <- decode.field("record", decode.dynamic)
  use cid <- decode.field("cid", decode.string)
  decode.success(CommitData(
    rev: rev,
    operation: operation,
    collection: collection,
    rkey: rkey,
    record: option.Some(record),
    cid: option.Some(cid),
  ))
}

/// Decoder for commit without record (delete operations)
fn commit_without_record_decoder() {
  use rev <- decode.field("rev", decode.string)
  use operation <- decode.field("operation", decode.string)
  use collection <- decode.field("collection", decode.string)
  use rkey <- decode.field("rkey", decode.string)
  decode.success(CommitData(
    rev: rev,
    operation: operation,
    collection: collection,
    rkey: rkey,
    record: option.None,
    cid: option.None,
  ))
}

/// Decoder for identity events
fn identity_event_decoder() {
  use did <- decode.field("did", decode.string)
  use time_us <- decode.field("time_us", decode.int)
  use identity <- decode.field("identity", identity_data_decoder())
  decode.success(IdentityEvent(did: did, time_us: time_us, identity: identity))
}

/// Decoder for identity data
fn identity_data_decoder() {
  use did <- decode.field("did", decode.string)
  use handle <- decode.field("handle", decode.string)
  use seq <- decode.field("seq", decode.int)
  use time <- decode.field("time", decode.string)
  decode.success(IdentityData(did: did, handle: handle, seq: seq, time: time))
}

/// Decoder for account events
fn account_event_decoder() {
  use did <- decode.field("did", decode.string)
  use time_us <- decode.field("time_us", decode.int)
  use account <- decode.field("account", account_data_decoder())
  decode.success(AccountEvent(did: did, time_us: time_us, account: account))
}

/// Decoder for account data
fn account_data_decoder() {
  use active <- decode.field("active", decode.bool)
  use did <- decode.field("did", decode.string)
  use seq <- decode.field("seq", decode.int)
  use time <- decode.field("time", decode.string)
  decode.success(AccountData(active: active, did: did, seq: seq, time: time))
}
+25
jetstream/src/jetstream_ffi.erl
··· 1 + -module(jetstream_ffi). 2 + -export([receive_ws_message/0]). 3 + 4 + %% Receive a WebSocket text message from the process mailbox 5 + receive_ws_message() -> 6 + receive 7 + %% Handle gun_ws messages directly (they're coming to us, not the handler) 8 + {gun_ws, _ConnPid, _StreamRef, {text, Text}} -> 9 + {ok, Text}; 10 + {gun_ws, _ConnPid, _StreamRef, {binary, _Binary}} -> 11 + %% Ignore binary messages, try again 12 + receive_ws_message(); 13 + {gun_ws, _ConnPid, _StreamRef, close} -> 14 + {error, nil}; 15 + {gun_down, _ConnPid, _Protocol, _Reason, _KilledStreams} -> 16 + {error, nil}; 17 + {gun_error, _ConnPid, _StreamRef, _Reason} -> 18 + {error, nil}; 19 + _Other -> 20 + %% Ignore unexpected messages 21 + receive_ws_message() 22 + after 60000 -> 23 + %% Timeout - return error to continue loop 24 + {error, nil} 25 + end.
+142
jetstream/src/jetstream_ws_ffi.erl
-module(jetstream_ws_ffi).
-export([connect/2]).

%% Connect to a Jetstream WebSocket endpoint using the gun HTTP/WS client.
%%
%% Url is a full ws:// or wss:// URL (query string included). On success the
%% function returns {ok, ConnPid} once the WebSocket upgrade completes; on
%% failure it returns {error, Reason}.
%%
%% NOTE(review): gun delivers gun_ws frames to the process that called
%% gun:open/3 (the connection owner) — that is the *caller* of this
%% function, not the spawned handler below. The caller currently consumes
%% the frames itself (see jetstream_ffi:receive_ws_message/0), so the
%% spawned handler mostly idles; confirm whether ownership should be
%% transferred to it or the spawn removed.
connect(Url, HandlerPid) ->
    %% gun needs ssl started for wss transports.
    application:ensure_all_started(ssl),
    application:ensure_all_started(gun),

    %% Parse the URL once and reuse the map for every component
    %% (the original code re-parsed the URL a second time just for the port).
    UriMap = uri_string:parse(Url),
    #{scheme := SchemeStr, host := Host, path := Path} = UriMap,

    %% gun:ws_upgrade/3 expects the path *including* the query string.
    Query = maps:get(query, UriMap, undefined),
    PathWithQuery = case Query of
        undefined -> Path;
        <<>> -> Path;
        Q -> <<Path/binary, "?", Q/binary>>
    end,

    %% Default the port by scheme when the URL does not carry one.
    Port = maps:get(port, UriMap,
        case SchemeStr of
            <<"wss">> -> 443;
            <<"ws">> -> 80;
            _ -> 443
        end),

    %% Determine transport
    Transport = case SchemeStr of
        <<"wss">> -> tls;
        <<"ws">> -> tcp;
        _ -> tls
    end,

    %% TLS options for secure connections.
    %% NOTE(review): verify_none disables certificate verification for
    %% simplicity; use proper CA certificates in production.
    TlsOpts = [{verify, verify_none}],

    %% Connection options
    Opts = case Transport of
        tls ->
            #{
                transport => tls,
                tls_opts => TlsOpts,
                protocols => [http],
                retry => 10,
                retry_timeout => 1000
            };
        tcp ->
            #{
                transport => tcp,
                protocols => [http],
                retry => 10,
                retry_timeout => 1000
            }
    end,

    %% gun:open/3 takes the host as a charlist.
    HostStr = case is_binary(Host) of
        true -> binary_to_list(Host);
        false -> Host
    end,

    %% Normalise the path to a binary; gun accepts iodata directly, so no
    %% conversion back to a list is needed at the upgrade call.
    PathBin = case is_binary(PathWithQuery) of
        true -> PathWithQuery;
        false -> list_to_binary(PathWithQuery)
    end,

    %% Open connection
    case gun:open(HostStr, Port, Opts) of
        {ok, ConnPid} ->
            %% Monitor the connection so we notice a crash while waiting.
            MRef = monitor(process, ConnPid),

            %% Wait for connection
            receive
                {gun_up, ConnPid, _Protocol} ->
                    %% Upgrade to WebSocket
                    StreamRef = gun:ws_upgrade(ConnPid, PathBin, []),

                    %% Wait for upgrade
                    receive
                        {gun_upgrade, ConnPid, StreamRef, [<<"websocket">>], _Headers} ->
                            %% Spawn a handler process to listen for WebSocket frames
                            spawn(fun() -> handle_messages(ConnPid, StreamRef, HandlerPid) end),
                            %% Return immediately so Gleam can continue
                            {ok, ConnPid};
                        {gun_response, ConnPid, _, _, Status, Headers} ->
                            gun:close(ConnPid),
                            {error, {upgrade_failed, Status, Headers}};
                        {gun_error, ConnPid, _StreamRef, Reason} ->
                            gun:close(ConnPid),
                            {error, {gun_error, Reason}};
                        {'DOWN', MRef, process, ConnPid, Reason} ->
                            {error, {connection_down, Reason}};
                        _Other ->
                            gun:close(ConnPid),
                            {error, unexpected_message}
                    after 30000 ->
                        gun:close(ConnPid),
                        {error, upgrade_timeout}
                    end;
                {'DOWN', MRef, process, ConnPid, Reason} ->
                    {error, {connection_failed, Reason}};
                _Other ->
                    gun:close(ConnPid),
                    {error, unexpected_message}
            after 30000 ->
                gun:close(ConnPid),
                {error, connection_timeout}
            end;
        {error, Reason} ->
            {error, {open_failed, Reason}}
    end.

%% Forward incoming WebSocket frames on this stream to HandlerPid as
%% {ws_text, _} / {ws_binary, _} / {ws_closed, _} / {ws_error, _} messages,
%% closing the gun connection on close/down/stop.
handle_messages(ConnPid, StreamRef, HandlerPid) ->
    receive
        {gun_ws, ConnPid, StreamRef, {text, Text}} ->
            HandlerPid ! {ws_text, Text},
            handle_messages(ConnPid, StreamRef, HandlerPid);
        {gun_ws, ConnPid, StreamRef, {binary, Binary}} ->
            HandlerPid ! {ws_binary, Binary},
            handle_messages(ConnPid, StreamRef, HandlerPid);
        {gun_ws, ConnPid, StreamRef, close} ->
            HandlerPid ! {ws_closed, normal},
            gun:close(ConnPid);
        {gun_down, ConnPid, _Protocol, Reason, _KilledStreams} ->
            HandlerPid ! {ws_error, Reason},
            gun:close(ConnPid);
        {gun_error, ConnPid, StreamRef, Reason} ->
            HandlerPid ! {ws_error, Reason},
            handle_messages(ConnPid, StreamRef, HandlerPid);
        stop ->
            gun:close(ConnPid)
    after 30000 ->
        %% Loop every 30 seconds to keep the process alive with no traffic.
        handle_messages(ConnPid, StreamRef, HandlerPid)
    end.
+13
jetstream/test/jetstream_test.gleam
··· 1 + import gleeunit 2 + 3 + pub fn main() -> Nil { 4 + gleeunit.main() 5 + } 6 + 7 + // gleeunit test functions end in `_test` 8 + pub fn hello_world_test() { 9 + let name = "Joe" 10 + let greeting = "Hello, " <> name <> "!" 11 + 12 + assert greeting == "Hello, Joe!" 13 + }
+13
lexicon/.gitignore
··· 1 + *.beam 2 + *.ez 3 + /build 4 + erl_crash.dump 5 + 6 + # Rust build artifacts 7 + /native/lexicon_nif/target 8 + /native/lexicon_nif/Cargo.lock 9 + 10 + # Compiled NIF library (platform-specific binaries) 11 + /priv/*.so 12 + /priv/*.dylib 13 + /priv/*.dll
+47
lexicon/Makefile
··· 1 + .PHONY: all build clean test 2 + 3 + # Detect the operating system 4 + UNAME_S := $(shell uname -s) 5 + 6 + # Determine the library extension and target directory 7 + ifeq ($(UNAME_S),Linux) 8 + LIB_EXT = so 9 + LIB_NAME = liblexicon_nif.so 10 + endif 11 + ifeq ($(UNAME_S),Darwin) 12 + LIB_EXT = dylib 13 + LIB_NAME = liblexicon_nif.dylib 14 + endif 15 + ifeq ($(OS),Windows_NT) 16 + LIB_EXT = dll 17 + LIB_NAME = lexicon_nif.dll 18 + endif 19 + 20 + NATIVE_DIR = native/lexicon_nif 21 + TARGET_DIR = $(NATIVE_DIR)/target/release 22 + PRIV_DIR = priv 23 + 24 + all: build 25 + 26 + build: 27 + @echo "Building Rust NIF library..." 28 + cd $(NATIVE_DIR) && cargo build --release 29 + @echo "Creating priv directory..." 30 + mkdir -p $(PRIV_DIR) 31 + @echo "Copying library to priv/..." 32 + ifeq ($(UNAME_S),Darwin) 33 + cp $(TARGET_DIR)/$(LIB_NAME) $(PRIV_DIR)/liblexicon_nif.so 34 + else 35 + cp $(TARGET_DIR)/$(LIB_NAME) $(PRIV_DIR)/ 36 + endif 37 + @echo "Build complete! Library is in $(PRIV_DIR)/" 38 + 39 + clean: 40 + @echo "Cleaning build artifacts..." 41 + cd $(NATIVE_DIR) && cargo clean 42 + rm -rf $(PRIV_DIR) 43 + @echo "Clean complete!" 44 + 45 + test: build 46 + @echo "Running Gleam tests..." 47 + gleam test
+200
lexicon/README.md
# Lexicon - AT Protocol Schema Validation for Gleam

A Gleam library for validating AT Protocol Lexicon schemas and data records, powered by the Rust [`slices-lexicon`](https://crates.io/crates/slices-lexicon) crate via Native Implemented Functions (NIFs).

## Features

- Validate AT Protocol Lexicon schema documents
- Validate data records against their schemas
- Check NSID (Namespaced Identifier) validity
- High-performance validation using native Rust code
- Type-safe Gleam API

## Prerequisites

- Rust toolchain (install via [rustup](https://rustup.rs/))
- Gleam compiler
- Make (for build automation)

## Building

The library requires building the Rust NIF before use:

```bash
cd lexicon
make build
```

This will:
1. Compile the Rust NIF library
2. Copy it to the `priv/` directory
3. Make it available for Gleam to load

### Platform-Specific Notes

- **macOS**: The `.dylib` file is automatically renamed to `.so` for BEAM compatibility
- **Linux**: Uses `.so` extension directly
- **Windows**: Uses `.dll` extension

## Usage

Add the lexicon library to your `gleam.toml`:

```toml
[dependencies]
lexicon = { path = "../lexicon" }
```

### Example: Validating Lexicon Schemas

```gleam
import lexicon
import gleam/io

pub fn main() {
  let schema_json = "{\"lexicon\": 1, \"id\": \"com.example.post\", ...}"

  case lexicon.validate_schemas([schema_json]) {
    Ok(Nil) -> io.println("Schema is valid!")
    Error(err) -> io.println("Validation failed: " <> lexicon.describe_error(err))
  }
}
```

### Example: Validating Records

```gleam
import lexicon

pub fn validate_post(record_json: String, schema_json: String) {
  case lexicon.validate_record([schema_json], "app.bsky.feed.post", record_json) {
    Ok(Nil) -> {
      // Record is valid, safe to store in database
      store_record(record_json)
    }
    Error(err) -> {
      // Handle validation error
      log_error(lexicon.describe_error(err))
    }
  }
}
```

### Example: Checking NSIDs

```gleam
import lexicon
import gleam/io

pub fn check_collection_name(collection: String) {
  case lexicon.is_valid_nsid(collection) {
    True -> io.println("Valid NSID: " <> collection)
    False -> io.println("Invalid NSID: " <> collection)
  }
}

// Valid NSIDs
check_collection_name("com.atproto.repo.createRecord")  // Valid
check_collection_name("app.bsky.feed.post")             // Valid

// Invalid NSIDs
check_collection_name("invalid nsid")                   // Invalid
check_collection_name("UPPERCASE.NOT.ALLOWED")          // Invalid
```

## API Reference

### `validate_schemas(json_strings: List(String)) -> Result(Nil, ValidationError)`

Validates one or more lexicon schema documents. Returns `Ok(Nil)` if all schemas are valid.

**Parameters:**
- `json_strings`: List of JSON strings representing lexicon schemas

**Returns:**
- `Ok(Nil)`: All schemas are valid
- `Error(ValidationError)`: One or more schemas failed validation

### `validate_record(lexicon_jsons: List(String), collection: String, record_json: String) -> Result(Nil, ValidationError)`

Validates a data record against its lexicon schemas.

**Parameters:**
- `lexicon_jsons`: List of JSON strings of the lexicon schemas to validate against
- `collection`: NSID of the collection the record belongs to
- `record_json`: JSON string of the record to validate

**Returns:**
- `Ok(Nil)`: Record is valid according to the schema
- `Error(ValidationError)`: Record validation failed

### `is_valid_nsid(nsid: String) -> Bool`

Checks if a string is a valid NSID (Namespaced Identifier).
133 + 134 + **Parameters:** 135 + - `nsid`: String to check 136 + 137 + **Returns:** 138 + - `True`: String is a valid NSID 139 + - `False`: String is not a valid NSID 140 + 141 + ### `describe_error(error: ValidationError) -> String` 142 + 143 + Converts a `ValidationError` to a human-readable string. 144 + 145 + ## Testing 146 + 147 + Run the test suite: 148 + 149 + ```bash 150 + make test 151 + ``` 152 + 153 + ## Development 154 + 155 + ### Project Structure 156 + 157 + ``` 158 + lexicon/ 159 + ├── gleam.toml # Gleam package configuration 160 + ├── Makefile # Build automation 161 + ├── README.md # This file 162 + ├── src/ 163 + │ ├── lexicon.gleam # Main Gleam API 164 + │ └── lexicon_nif.erl # Erlang NIF loader 165 + ├── native/ 166 + │ └── lexicon_nif/ # Rust NIF implementation 167 + │ ├── Cargo.toml 168 + │ └── src/ 169 + │ └── lib.rs 170 + └── priv/ # Compiled NIF library (created by build) 171 + ``` 172 + 173 + ### Cleaning 174 + 175 + To remove build artifacts: 176 + 177 + ```bash 178 + make clean 179 + ``` 180 + 181 + ## Important Notes 182 + 183 + ### NIF Safety 184 + 185 + Native Implemented Functions (NIFs) run in the same OS process as the BEAM VM. If a NIF crashes, it can bring down the entire runtime rather than just an isolated process. This library includes error handling to minimize this risk, but you should be aware of this limitation. 186 + 187 + ### Performance 188 + 189 + Because validation runs in native Rust code, it's significantly faster than a pure Erlang/Gleam implementation, making it suitable for validating large numbers of schemas or records. 190 + 191 + ## License 192 + 193 + This library uses the MIT-licensed `slices-lexicon` Rust crate. 
194 + 195 + ## Resources 196 + 197 + - [AT Protocol Specification](https://atproto.com/) 198 + - [Lexicon Schema Language](https://atproto.com/specs/lexicon) 199 + - [slices-lexicon Rust Crate](https://crates.io/crates/slices-lexicon) 200 + - [Rustler Documentation](https://github.com/rusterlium/rustler)
+9
lexicon/gleam.toml
··· 1 + name = "lexicon" 2 + version = "0.1.0" 3 + 4 + [dependencies] 5 + gleam_stdlib = ">= 0.60.0 and < 1.0.0" 6 + gleam_json = ">= 3.0.2 and < 4.0.0" 7 + 8 + [dev-dependencies] 9 + gleeunit = ">= 1.0.0 and < 2.0.0"
+13
lexicon/manifest.toml
··· 1 + # This file was generated by Gleam 2 + # You typically do not need to edit this file 3 + 4 + packages = [ 5 + { name = "gleam_json", version = "3.0.2", build_tools = ["gleam"], requirements = ["gleam_stdlib"], otp_app = "gleam_json", source = "hex", outer_checksum = "874FA3C3BB6E22DD2BB111966BD40B3759E9094E05257899A7C08F5DE77EC049" }, 6 + { name = "gleam_stdlib", version = "0.65.0", build_tools = ["gleam"], requirements = [], otp_app = "gleam_stdlib", source = "hex", outer_checksum = "7C69C71D8C493AE11A5184828A77110EB05A7786EBF8B25B36A72F879C3EE107" }, 7 + { name = "gleeunit", version = "1.6.1", build_tools = ["gleam"], requirements = ["gleam_stdlib"], otp_app = "gleeunit", source = "hex", outer_checksum = "FDC68A8C492B1E9B429249062CD9BAC9B5538C6FBF584817205D0998C42E1DAC" }, 8 + ] 9 + 10 + [requirements] 11 + gleam_json = { version = ">= 3.0.2 and < 4.0.0" } 12 + gleam_stdlib = { version = ">= 0.60.0 and < 1.0.0" } 13 + gleeunit = { version = ">= 1.0.0 and < 2.0.0" }
+12
lexicon/native/lexicon_nif/Cargo.toml
··· 1 + [package] 2 + name = "lexicon_nif" 3 + version = "0.1.0" 4 + edition = "2021" 5 + 6 + [lib] 7 + crate-type = ["dylib"] 8 + 9 + [dependencies] 10 + rustler = "0.35" 11 + slices-lexicon = "0.3.0" 12 + serde_json = "1.0"
+60
lexicon/native/lexicon_nif/src/lib.rs
··· 1 + use serde_json::Value; 2 + 3 + // NIF function to validate multiple lexicon schema documents 4 + #[rustler::nif] 5 + fn validate_schemas(json_strings: Vec<String>) -> Result<String, String> { 6 + // Parse JSON strings into serde_json Values 7 + let mut lexicons: Vec<Value> = Vec::new(); 8 + 9 + for json_str in json_strings { 10 + let lexicon: Value = 11 + serde_json::from_str(&json_str).map_err(|e| format!("Failed to parse JSON: {}", e))?; 12 + lexicons.push(lexicon); 13 + } 14 + 15 + // Validate the lexicons using slices-lexicon 16 + match slices_lexicon::validate(lexicons) { 17 + Ok(_) => Ok("ok".to_string()), 18 + Err(errors) => { 19 + // Convert the error HashMap to a JSON string for easier handling in Gleam 20 + let error_json = serde_json::to_string(&errors) 21 + .unwrap_or_else(|_| "Failed to serialize validation errors".to_string()); 22 + Err(error_json) 23 + } 24 + } 25 + } 26 + 27 + // NIF function to validate a record against lexicon schemas 28 + #[rustler::nif] 29 + fn validate_record( 30 + lexicon_jsons: Vec<String>, 31 + collection: String, 32 + record_json: String, 33 + ) -> Result<String, String> { 34 + // Parse lexicon JSON strings into serde_json Values 35 + let mut lexicons: Vec<Value> = Vec::new(); 36 + for json_str in lexicon_jsons { 37 + let lexicon: Value = serde_json::from_str(&json_str) 38 + .map_err(|e| format!("Failed to parse lexicon JSON: {}", e))?; 39 + lexicons.push(lexicon); 40 + } 41 + 42 + // Parse the record JSON 43 + let record: Value = serde_json::from_str(&record_json) 44 + .map_err(|e| format!("Failed to parse record JSON: {}", e))?; 45 + 46 + // Validate the record against the lexicon schemas 47 + match slices_lexicon::validate_record(lexicons, &collection, record) { 48 + Ok(_) => Ok("ok".to_string()), 49 + Err(error) => Err(format!("{:?}", error)), 50 + } 51 + } 52 + 53 + // NIF function to check if a string is a valid NSID 54 + #[rustler::nif] 55 + fn is_valid_nsid(nsid: String) -> bool { 56 + 
slices_lexicon::is_valid_nsid(&nsid) 57 + } 58 + 59 + // Initialize the NIF module 60 + rustler::init!("lexicon_nif");
+94
lexicon/src/lexicon.gleam
//// Type-safe Gleam front-end for the `lexicon_nif` native library.
////
//// Wraps the Rust `slices-lexicon` crate (loaded as a NIF) to validate
//// AT Protocol Lexicon schemas, validate data records, and check NSIDs.

/// Result type for validation errors
pub type ValidationError {
  ParseError(message: String)
  ValidationError(message: String)
}

/// Validates multiple lexicon schema documents
///
/// Each element of `json_strings` is one lexicon schema as a JSON string.
/// Succeeds with `Ok(Nil)` only when every schema is valid; any failure
/// reported by the native code is wrapped in `ValidationError`.
///
/// ## Example
///
/// ```gleam
/// case validate_schemas([schema_json1, schema_json2]) {
///   Ok(Nil) -> io.println("All schemas valid!")
///   Error(err) -> io.println("Validation failed: " <> describe_error(err))
/// }
/// ```
pub fn validate_schemas(
  json_strings: List(String),
) -> Result(Nil, ValidationError) {
  case do_validate_schemas(json_strings) {
    Error(reason) -> Error(ValidationError(reason))
    Ok(_) -> Ok(Nil)
  }
}

/// Validates a single data record against lexicon schemas
///
/// `lexicon_jsons` supplies the schemas, `collection` names the record's
/// collection (an NSID), and `record_json` is the record itself. The
/// native "ok" payload is discarded; callers only ever see `Ok(Nil)`.
///
/// ## Example
///
/// ```gleam
/// case validate_record([schema_json], "app.bsky.feed.post", record_json) {
///   Ok(Nil) -> io.println("Record is valid!")
///   Error(err) -> io.println("Invalid record: " <> describe_error(err))
/// }
/// ```
pub fn validate_record(
  lexicon_jsons: List(String),
  collection: String,
  record_json: String,
) -> Result(Nil, ValidationError) {
  case do_validate_record(lexicon_jsons, collection, record_json) {
    Error(reason) -> Error(ValidationError(reason))
    Ok(_) -> Ok(Nil)
  }
}

/// Checks if a string is a valid NSID (Namespaced Identifier)
///
/// NSIDs identify lexicons, collections, and other namespaced resources
/// throughout the AT Protocol. Delegates directly to the native check.
///
/// ## Example
///
/// ```gleam
/// is_valid_nsid("com.atproto.repo.createRecord")  // True
/// is_valid_nsid("invalid nsid")                   // False
/// ```
pub fn is_valid_nsid(nsid: String) -> Bool {
  do_is_valid_nsid(nsid)
}

/// Converts a ValidationError to a human-readable string
pub fn describe_error(error: ValidationError) -> String {
  case error {
    ValidationError(details) -> "Validation error: " <> details
    ParseError(details) -> "Parse error: " <> details
  }
}

// External NIF function declarations

@external(erlang, "lexicon_nif", "validate_schemas")
fn do_validate_schemas(json_strings: List(String)) -> Result(String, String)

@external(erlang, "lexicon_nif", "validate_record")
fn do_validate_record(
  lexicon_jsons: List(String),
  collection: String,
  record_json: String,
) -> Result(String, String)

@external(erlang, "lexicon_nif", "is_valid_nsid")
fn do_is_valid_nsid(nsid: String) -> Bool
+27
lexicon/src/lexicon_nif.erl
··· 1 + -module(lexicon_nif). 2 + -export([validate_schemas/1, validate_record/3, is_valid_nsid/1]). 3 + -nifs([validate_schemas/1, validate_record/3, is_valid_nsid/1]). 4 + -on_load(init/0). 5 + 6 + init() -> 7 + PrivDir = case code:priv_dir(lexicon) of 8 + {error, _} -> 9 + % Fallback for development 10 + case filelib:is_dir(filename:join(["..", priv])) of 11 + true -> filename:join(["..", priv]); 12 + _ -> "priv" 13 + end; 14 + Dir -> 15 + Dir 16 + end, 17 + SoName = filename:join(PrivDir, "liblexicon_nif"), 18 + ok = erlang:load_nif(SoName, 0). 19 + 20 + validate_schemas(_JsonStrings) -> 21 + exit(nif_library_not_loaded). 22 + 23 + validate_record(_LexiconJsons, _Collection, _RecordJson) -> 24 + exit(nif_library_not_loaded). 25 + 26 + is_valid_nsid(_Nsid) -> 27 + exit(nif_library_not_loaded).
+93
lexicon/test/lexicon_test.gleam
//// Tests for the lexicon NIF wrapper. These exercise the full
//// Gleam -> Erlang -> Rust path, so they require `make build` first.

import gleeunit
import gleeunit/should
import lexicon

pub fn main() {
  gleeunit.main()
}

// Test NSID validation - this is the simplest test to verify NIF is working
pub fn valid_nsid_test() {
  // Valid NSIDs
  lexicon.is_valid_nsid("com.atproto.repo.createRecord")
  |> should.be_true
}

pub fn valid_nsid_simple_test() {
  lexicon.is_valid_nsid("app.bsky.feed.post")
  |> should.be_true
}

pub fn invalid_nsid_with_space_test() {
  // Invalid NSID - contains space
  lexicon.is_valid_nsid("invalid nsid")
  |> should.be_false
}

pub fn invalid_nsid_empty_test() {
  // Invalid NSID - empty string
  lexicon.is_valid_nsid("")
  |> should.be_false
}

// Test validating a valid record against a lexicon schema
pub fn validate_valid_record_test() {
  // Minimal record schema: a single required "text" string, max 300 chars.
  let schema =
    "{
    \"lexicon\": 1,
    \"id\": \"com.example.post\",
    \"defs\": {
      \"main\": {
        \"type\": \"record\",
        \"key\": \"tid\",
        \"record\": {
          \"type\": \"object\",
          \"required\": [\"text\"],
          \"properties\": {
            \"text\": {
              \"type\": \"string\",
              \"maxLength\": 300
            }
          }
        }
      }
    }
  }"

  let record =
    "{
    \"text\": \"Hello, world!\"
  }"

  lexicon.validate_record([schema], "com.example.post", record)
  |> should.be_ok
}

// Test validating an invalid record (missing required field)
pub fn validate_invalid_record_missing_field_test() {
  let schema =
    "{
    \"lexicon\": 1,
    \"id\": \"com.example.post\",
    \"defs\": {
      \"main\": {
        \"type\": \"record\",
        \"key\": \"tid\",
        \"record\": {
          \"type\": \"object\",
          \"required\": [\"text\"],
          \"properties\": {
            \"text\": {
              \"type\": \"string\"
            }
          }
        }
      }
    }
  }"

  // Empty object omits the required "text" field, so validation must fail.
  let record = "{}"

  lexicon.validate_record([schema], "com.example.post", record)
  |> should.be_error
}
+23
lexicon_graphql/.github/workflows/test.yml
··· 1 + name: test 2 + 3 + on: 4 + push: 5 + branches: 6 + - master 7 + - main 8 + pull_request: 9 + 10 + jobs: 11 + test: 12 + runs-on: ubuntu-latest 13 + steps: 14 + - uses: actions/checkout@v4 15 + - uses: erlef/setup-beam@v1 16 + with: 17 + otp-version: "28" 18 + gleam-version: "1.13.0" 19 + rebar3-version: "3" 20 + # elixir-version: "1" 21 + - run: gleam deps download 22 + - run: gleam test 23 + - run: gleam format --check src test
+4
lexicon_graphql/.gitignore
··· 1 + *.beam 2 + *.ez 3 + /build 4 + erl_crash.dump
+240
lexicon_graphql/README.md
··· 1 + # Lexicon GraphQL 2 + 3 + Automatic GraphQL schema generation from AT Protocol Lexicon definitions. This package bridges AT Protocol's Lexicon schema system with GraphQL, enabling automatic GraphQL API generation for ATProto records. 4 + 5 + ## Features 6 + 7 + ### Automatic Schema Generation 8 + - **Lexicon Parsing**: Parses AT Protocol lexicon JSON files 9 + - **Type Mapping**: Automatically maps Lexicon types to GraphQL types 10 + - **Database Integration**: Generates GraphQL schemas that query database records 11 + - **Field Resolvers**: Auto-generated resolvers for lexicon properties 12 + 13 + ### Supported Lexicon Types 14 + - `string` → GraphQL String 15 + - `integer` → GraphQL Int 16 + - `boolean` → GraphQL Boolean 17 + - `datetime` → GraphQL String (ISO 8601 format) 18 + - Objects and nested properties 19 + - Arrays/lists 20 + 21 + ### Database Schema Builder 22 + Generates GraphQL schemas for database-stored ATProto records with: 23 + - Automatic field extraction from database records 24 + - Support for nested lexicon properties 25 + - Proper JSON parsing and field access 26 + - Metadata fields (uri, cid, did, collection, indexedAt) 27 + 28 + ## Architecture 29 + 30 + The package consists of several modules: 31 + 32 + - `lexicon_graphql/lexicon_parser.gleam` - Parses lexicon JSON files 33 + - `lexicon_graphql/type_mapper.gleam` - Maps lexicon types to GraphQL types 34 + - `lexicon_graphql/schema_builder.gleam` - Builds GraphQL schemas from lexicons 35 + - `lexicon_graphql/db_schema_builder.gleam` - Database-specific schema generation 36 + - `lexicon_graphql/ref_resolver.gleam` - Resolves lexicon references 37 + - `lexicon_graphql/nsid.gleam` - NSID (Namespaced Identifier) utilities 38 + 39 + ## Usage 40 + 41 + ### Creating a Schema from a Lexicon 42 + 43 + ```gleam 44 + import lexicon_graphql 45 + import lexicon_graphql/db_schema_builder 46 + import graphql/schema 47 + 48 + // Parse a lexicon file 49 + let lexicon_json = "{ \"lexicon\": 1, \"id\": 
\"xyz.statusphere.status\", ... }" 50 + let assert Ok(lexicon) = lexicon_graphql.parse_lexicon(lexicon_json) 51 + 52 + // Generate GraphQL schema for database queries 53 + let collection_name = "xyz.statusphere.status" 54 + let graphql_type = db_schema_builder.build_db_record_type( 55 + collection_name, 56 + lexicon, 57 + get_records_fn 58 + ) 59 + ``` 60 + 61 + ### Example: Status Record Schema 62 + 63 + For a lexicon like: 64 + ```json 65 + { 66 + "lexicon": 1, 67 + "id": "xyz.statusphere.status", 68 + "defs": { 69 + "main": { 70 + "type": "record", 71 + "record": { 72 + "type": "object", 73 + "properties": { 74 + "status": { "type": "string" }, 75 + "createdAt": { "type": "string", "format": "datetime" } 76 + } 77 + } 78 + } 79 + } 80 + } 81 + ``` 82 + 83 + The package automatically generates a GraphQL type with: 84 + ```graphql 85 + type XyzStatusphereStatus { 86 + uri: String! 87 + cid: String! 88 + did: String! 89 + collection: String! 90 + indexedAt: String! 91 + status: String 92 + createdAt: String 93 + } 94 + ``` 95 + 96 + ## Database Integration 97 + 98 + ### Record Structure 99 + 100 + Records in the database have the following structure: 101 + - `uri`: AT URI of the record 102 + - `cid`: Content identifier 103 + - `did`: DID of the record owner 104 + - `collection`: Lexicon collection name 105 + - `json`: **JSON string** containing the record value 106 + - `indexed_at`: When the record was indexed 107 + 108 + ### JSON Storage Format 109 + 110 + **IMPORTANT**: The `json` field MUST be stored as a proper JSON string, not Gleam/Erlang term syntax. 
111 + 112 + CORRECT: `{"$type":"xyz.statusphere.status","status":"..","createdAt":"2025-10-28T20:00:00Z"}` 113 + 114 + INCORRECT: `dict.from_list([#("status", ".."), #("createdAt", "2025-10-28T20:00:00Z")])` 115 + 116 + ### Data Conversion 117 + 118 + When storing records from Jetstream or backfill operations, always use proper JSON encoding: 119 + 120 + ```gleam 121 + import gleam/dynamic.{type Dynamic} 122 + 123 + // Convert Dynamic (Erlang term) to JSON string 124 + fn dynamic_to_json(value: Dynamic) -> String { 125 + let iolist = json_encode(value) 126 + iolist_to_string(iolist) 127 + } 128 + 129 + @external(erlang, "json", "encode") 130 + fn json_encode(value: Dynamic) -> Dynamic 131 + ``` 132 + 133 + **Do NOT use** `string.inspect(value)` as it produces Gleam syntax, not JSON. 134 + 135 + ## Field Resolution 136 + 137 + The `db_schema_builder` module provides helper functions for extracting fields from context: 138 + 139 + ```gleam 140 + // Get a top-level field from context 141 + get_field_from_context(ctx, "fieldName") 142 + 143 + // Get a nested field from context 144 + get_nested_field_from_context(ctx, "parent", "child") 145 + ``` 146 + 147 + These functions handle: 148 + - Safe field access with Result types 149 + - Null handling 150 + - Type checking 151 + - Nested object traversal 152 + 153 + ## Testing 154 + 155 + The package uses the `graphql` package's test suite to verify schema generation and execution. 
156 + 157 + ## Dependencies 158 + 159 + - `gleam_stdlib` >= 0.44.0 160 + - `gleam_json` >= 3.0.0 161 + - `graphql` (local package) 162 + 163 + ## Integration Example 164 + 165 + ```gleam 166 + import lexicon_graphql/db_schema_builder 167 + import database 168 + import graphql/schema 169 + import graphql/executor 170 + 171 + // Load lexicon 172 + let lexicon_json = load_lexicon("priv/lexicons/xyz/statusphere/status.json") 173 + let assert Ok(lexicon) = lexicon_graphql.parse_lexicon(lexicon_json) 174 + 175 + // Define record fetcher 176 + fn get_records() { 177 + database.get_records_by_collection(db, "xyz.statusphere.status") 178 + |> result.map(fn(records) { 179 + list.map(records, record_to_graphql_value) 180 + }) 181 + } 182 + 183 + // Build GraphQL type 184 + let status_type = db_schema_builder.build_db_record_type( 185 + "xyz.statusphere.status", 186 + lexicon, 187 + get_records 188 + ) 189 + 190 + // Create query type 191 + let query_type = schema.object_type("Query", "Root query", [ 192 + schema.field( 193 + "statuses", 194 + schema.list_type(status_type), 195 + "Get all statuses", 196 + fn(_) { get_records() } 197 + ) 198 + ]) 199 + 200 + // Create and use schema 201 + let graphql_schema = schema.new(query_type) 202 + executor.execute("{ statuses { status } }", graphql_schema, schema.Context(None)) 203 + ``` 204 + 205 + ## NSID Support 206 + 207 + The package includes utilities for working with NSIDs (Namespaced Identifiers): 208 + 209 + ```gleam 210 + import lexicon_graphql/nsid 211 + 212 + // Convert NSID to GraphQL type name 213 + nsid.to_graphql_name("xyz.statusphere.status") 214 + // → "XyzStatusphereStatus" 215 + 216 + // Convert NSID to field name 217 + nsid.to_graphql_field_name("xyz.statusphere.status") 218 + // → "xyzStatusphereStatus" 219 + ``` 220 + 221 + ## Development 222 + 223 + Run tests: 224 + ```sh 225 + cd lexicon_graphql 226 + gleam test 227 + ``` 228 + 229 + Build: 230 + ```sh 231 + gleam build 232 + ``` 233 + 234 + ## Future 
Enhancements 235 + 236 + - Support for lexicon references ($ref) 237 + - Union types 238 + - Custom validation rules 239 + - Mutation support for creating/updating records 240 + - Subscription support for real-time updates
+21
lexicon_graphql/gleam.toml
··· 1 + name = "lexicon_graphql" 2 + version = "1.0.0" 3 + 4 + # Fill out these fields if you intend to generate HTML documentation or publish 5 + # your project to the Hex package manager. 6 + # 7 + # description = "" 8 + # licences = ["Apache-2.0"] 9 + # repository = { type = "github", user = "", repo = "" } 10 + # links = [{ title = "Website", href = "" }] 11 + # 12 + # For a full reference of all the available options, you can have a look at 13 + # https://gleam.run/writing-gleam/gleam-toml/. 14 + 15 + [dependencies] 16 + gleam_stdlib = ">= 0.44.0 and < 2.0.0" 17 + gleam_json = ">= 3.0.0 and < 4.0.0" 18 + graphql = {path = "../graphql"} 19 + 20 + [dev-dependencies] 21 + gleeunit = ">= 1.0.0 and < 2.0.0"
+15
lexicon_graphql/manifest.toml
··· 1 + # This file was generated by Gleam 2 + # You typically do not need to edit this file 3 + 4 + packages = [ 5 + { name = "gleam_json", version = "3.0.2", build_tools = ["gleam"], requirements = ["gleam_stdlib"], otp_app = "gleam_json", source = "hex", outer_checksum = "874FA3C3BB6E22DD2BB111966BD40B3759E9094E05257899A7C08F5DE77EC049" }, 6 + { name = "gleam_stdlib", version = "0.65.0", build_tools = ["gleam"], requirements = [], otp_app = "gleam_stdlib", source = "hex", outer_checksum = "7C69C71D8C493AE11A5184828A77110EB05A7786EBF8B25B36A72F879C3EE107" }, 7 + { name = "gleeunit", version = "1.7.0", build_tools = ["gleam"], requirements = ["gleam_stdlib"], otp_app = "gleeunit", source = "hex", outer_checksum = "CD701726CBCE5588B375D157B4391CFD0F2F134CD12D9B6998A395484DE05C58" }, 8 + { name = "graphql", version = "1.0.0", build_tools = ["gleam"], requirements = ["gleam_stdlib"], source = "local", path = "../graphql" }, 9 + ] 10 + 11 + [requirements] 12 + gleam_json = { version = ">= 3.0.0 and < 4.0.0" } 13 + gleam_stdlib = { version = ">= 0.44.0 and < 2.0.0" } 14 + gleeunit = { version = ">= 1.0.0 and < 2.0.0" } 15 + graphql = { path = "../graphql" }
+5
lexicon_graphql/src/lexicon_graphql.gleam
//// Entry point module for the lexicon_graphql package.
////
//// This module is currently a placeholder; the package's functionality
//// lives in the lexicon_graphql/* submodules (lexicon_parser,
//// schema_builder, db_schema_builder, type_mapper, ref_resolver, nsid).

import gleam/io

/// Placeholder main used by `gleam run`; prints a greeting and returns Nil.
pub fn main() -> Nil {
  io.println("Hello from lexicon_graphql!")
}
+221
lexicon_graphql/src/lexicon_graphql/db_schema_builder.gleam
/// Database Schema Builder
///
/// Builds GraphQL schemas from AT Protocol lexicon definitions with
/// database-backed resolvers. This extends the base schema_builder with
/// actual data resolution.
import gleam/list
import gleam/option
import gleam/result
import graphql/schema
import graphql/value
import lexicon_graphql/nsid
import lexicon_graphql/schema_builder
import lexicon_graphql/type_mapper

/// Record type metadata with database resolver info
type RecordType {
  RecordType(
    nsid: String,
    type_name: String,
    field_name: String,
    fields: List(schema.Field),
  )
}

/// Type for a database record fetcher function
/// Takes a collection NSID and returns a list of records as GraphQL values
pub type RecordFetcher =
  fn(String) -> Result(List(value.Value), String)

/// Build a GraphQL schema from lexicons with database-backed resolvers
///
/// The fetcher parameter should be a function that queries the database for
/// records belonging to the given collection NSID. Returns an Error for an
/// empty lexicon list.
pub fn build_schema_with_fetcher(
  lexicons: List(schema_builder.Lexicon),
  fetcher: RecordFetcher,
) -> Result(schema.Schema, String) {
  case lexicons {
    [] -> Error("Cannot build schema from empty lexicon list")
    _ -> {
      // Extract record types from lexicons
      let record_types = extract_record_types(lexicons)

      // Build the query type with fields for each record
      let query_type = build_query_type(record_types, fetcher)

      // Create the schema (no mutation type yet)
      Ok(schema.schema(query_type, option.None))
    }
  }
}

/// Extract record types from lexicon definitions.
/// Lexicons that are not record definitions are silently skipped.
fn extract_record_types(
  lexicons: List(schema_builder.Lexicon),
) -> List(RecordType) {
  lexicons
  |> list.filter_map(parse_lexicon)
}

/// Parse a single lexicon into a RecordType.
/// Only lexicons whose main def has type "record" are accepted.
fn parse_lexicon(lexicon: schema_builder.Lexicon) -> Result(RecordType, Nil) {
  case lexicon {
    schema_builder.Lexicon(
      id,
      schema_builder.Defs(schema_builder.RecordDef("record", properties)),
    ) -> {
      let type_name = nsid.to_type_name(id)
      let field_name = nsid.to_field_name(id)
      let fields = build_fields(properties)

      Ok(RecordType(
        nsid: id,
        type_name: type_name,
        field_name: field_name,
        fields: fields,
      ))
    }
    _ -> Error(Nil)
  }
}

/// Build GraphQL fields from lexicon properties.
///
/// The result is the standard AT Proto metadata fields (uri, cid, did,
/// collection, indexedAt) followed by one field per lexicon property.
fn build_fields(
  properties: List(#(String, schema_builder.Property)),
) -> List(schema.Field) {
  // Standard AT Proto metadata fields; each resolves a string from the
  // record object in the resolver context, falling back to "" if absent.
  let standard_fields = [
    schema.field(
      "uri",
      schema.string_type(),
      "Record URI",
      fn(ctx: schema.Context) {
        case get_field_from_context(ctx, "uri") {
          Ok(uri) -> Ok(value.String(uri))
          Error(_) -> Ok(value.String(""))
        }
      },
    ),
    schema.field("cid", schema.string_type(), "Record CID", fn(ctx) {
      case get_field_from_context(ctx, "cid") {
        Ok(cid) -> Ok(value.String(cid))
        Error(_) -> Ok(value.String(""))
      }
    }),
    schema.field("did", schema.string_type(), "DID of record author", fn(ctx) {
      case get_field_from_context(ctx, "did") {
        Ok(did) -> Ok(value.String(did))
        Error(_) -> Ok(value.String(""))
      }
    }),
    schema.field("collection", schema.string_type(), "Collection name", fn(ctx) {
      case get_field_from_context(ctx, "collection") {
        Ok(collection) -> Ok(value.String(collection))
        Error(_) -> Ok(value.String(""))
      }
    }),
    schema.field(
      "indexedAt",
      schema.string_type(),
      "When record was indexed",
      fn(ctx) {
        case get_field_from_context(ctx, "indexedAt") {
          Ok(indexed_at) -> Ok(value.String(indexed_at))
          Error(_) -> Ok(value.String(""))
        }
      },
    ),
  ]

  // Build fields from lexicon properties
  let lexicon_fields =
    list.map(properties, fn(prop) {
      let #(name, schema_builder.Property(type_, _required)) = prop
      let graphql_type = type_mapper.map_type(type_)

      schema.field(name, graphql_type, "Field from lexicon", fn(ctx) {
        // Pass the stored value through unchanged so non-string lexicon
        // types (integer, boolean, ...) are preserved. Previously only
        // value.String was matched, which made every non-string property
        // resolve to Null even though type_mapper maps those lexicon
        // types to Int/Boolean GraphQL scalars.
        case get_nested_value_from_context(ctx, "value", name) {
          Ok(val) -> Ok(val)
          Error(_) -> Ok(value.Null)
        }
      })
    })

  // Combine standard and lexicon fields
  list.append(standard_fields, lexicon_fields)
}

/// Build the root Query type with fields for each record type
fn build_query_type(
  record_types: List(RecordType),
  fetcher: RecordFetcher,
) -> schema.Type {
  let query_fields =
    list.map(record_types, fn(record_type) {
      // Build the object type for this record
      let object_type =
        schema.object_type(
          record_type.type_name,
          "Record type: " <> record_type.nsid,
          record_type.fields,
        )

      // Create a list type for the query field
      let list_type = schema.list_type(object_type)

      // Create query field that returns a list of this record type.
      // Capture the nsid and fetcher in the closure.
      let collection_nsid = record_type.nsid
      schema.field(
        record_type.field_name,
        list_type,
        "Query " <> record_type.nsid,
        fn(_ctx: schema.Context) {
          // Call the fetcher function to get records from database
          fetcher(collection_nsid)
          |> result.map(fn(records) { value.List(records) })
        },
      )
    })

  schema.object_type("Query", "Root query type", query_fields)
}

/// Helper to extract a string field from the record object in the
/// resolver context. Errors when the context holds no object, the field
/// is missing, or the field is not a string.
fn get_field_from_context(
  ctx: schema.Context,
  field_name: String,
) -> Result(String, Nil) {
  case ctx.data {
    option.Some(value.Object(fields)) -> {
      case list.key_find(fields, field_name) {
        Ok(value.String(val)) -> Ok(val)
        _ -> Error(Nil)
      }
    }
    _ -> Error(Nil)
  }
}

/// Helper to extract a nested field value of any type from the resolver
/// context: looks up `parent_field` (an object) in the context data, then
/// `field_name` inside it, returning the raw value.Value unchanged.
fn get_nested_value_from_context(
  ctx: schema.Context,
  parent_field: String,
  field_name: String,
) -> Result(value.Value, Nil) {
  case ctx.data {
    option.Some(value.Object(fields)) -> {
      case list.key_find(fields, parent_field) {
        Ok(value.Object(nested_fields)) ->
          list.key_find(nested_fields, field_name)
        _ -> Error(Nil)
      }
    }
    _ -> Error(Nil)
  }
}
+83
lexicon_graphql/src/lexicon_graphql/lexicon_parser.gleam
/// Lexicon JSON Parser
///
/// Parses AT Protocol lexicon JSON documents into structured Lexicon types
/// that can be used with the schema builder.
import gleam/dict
import gleam/dynamic/decode
import gleam/json
import gleam/list
import gleam/result
import lexicon_graphql/schema_builder

/// Parse a lexicon JSON string into a Lexicon type.
///
/// Expects a document with a string "id" and a "defs" object whose "main"
/// entry is a record definition. All decode failures (invalid JSON, missing
/// "id"/"defs"/"main", wrong shapes) collapse into the single error message
/// "Failed to parse lexicon JSON" — the underlying decode error is discarded.
pub fn parse_lexicon(json_str: String) -> Result(schema_builder.Lexicon, String) {
  // Decode the JSON into a structured format using continuation-passing style
  let decoder = {
    use id <- decode.field("id", decode.string)
    use defs <- decode.field("defs", decode_defs())
    decode.success(schema_builder.Lexicon(id:, defs:))
  }

  json.parse(json_str, decoder)
  |> result.map_error(fn(_) { "Failed to parse lexicon JSON" })
}

/// Create a decoder for the defs object.
/// Only the "main" definition is decoded; other defs are ignored.
fn decode_defs() -> decode.Decoder(schema_builder.Defs) {
  use main <- decode.field("main", decode_record_def())
  decode.success(schema_builder.Defs(main:))
}

/// Create a decoder for a record definition.
/// Reads the "type" string (e.g. "record") and the "record" object's
/// properties; type filtering happens later in the schema builders.
fn decode_record_def() -> decode.Decoder(schema_builder.RecordDef) {
  use type_ <- decode.field("type", decode.string)
  use record <- decode.field("record", decode_record_object())
  decode.success(schema_builder.RecordDef(type_:, properties: record))
}

/// Create a decoder for the record object which contains properties.
///
/// Produces a list of (name, Property) pairs. The "required" array is
/// optional and defaults to []; a property is marked required when its
/// name appears in that array. Note: because properties are decoded via a
/// dict, the resulting order is dict order, not source order.
fn decode_record_object() -> decode.Decoder(
  List(#(String, schema_builder.Property)),
) {
  // This is more complex - we need to decode a dict of properties
  use properties_dict <- decode.field(
    "properties",
    decode.dict(decode.string, decode.dynamic),
  )
  use required_list <- decode.optional_field(
    "required",
    [],
    decode.list(decode.string),
  )

  // Convert dict to list of properties
  let properties =
    properties_dict
    |> dict.to_list
    |> list.map(fn(entry) {
      let #(name, prop_dyn) = entry
      let is_required = list.contains(required_list, name)

      // Extract the type from the property. A property whose "type" field
      // is missing or not a string is treated as "string" rather than
      // failing the whole parse (deliberate best-effort behavior).
      let prop_type = case decode_property_type(prop_dyn) {
        Ok(type_) -> type_
        Error(_) -> "string"
        // Default fallback
      }

      #(name, schema_builder.Property(prop_type, is_required))
    })

  decode.success(properties)
}

/// Decode a property's type field.
/// Returns the value of the property object's "type" key, e.g. "string".
fn decode_property_type(
  dyn: decode.Dynamic,
) -> Result(String, List(decode.DecodeError)) {
  let type_decoder = {
    use type_ <- decode.field("type", decode.string)
    decode.success(type_)
  }
  decode.run(dyn, type_decoder)
}
+65
lexicon_graphql/src/lexicon_graphql/nsid.gleam
/// NSID (Namespaced Identifier) utilities
///
/// NSIDs identify lexicons, collections, and other namespaced resources
/// throughout AT Protocol. They use the dotted form "domain.name.thing".
///
/// This module converts NSIDs into GraphQL-friendly identifiers.
import gleam/list
import gleam/result
import gleam/string

/// Converts an NSID to a GraphQL type name (PascalCase).
///
/// ## Examples
///
/// ```gleam
/// to_type_name("xyz.statusphere.status")  // "XyzStatusphereStatus"
/// to_type_name("app.bsky.feed.post")      // "AppBskyFeedPost"
/// ```
pub fn to_type_name(nsid: String) -> String {
  string.split(nsid, ".")
  |> list.map(capitalize_first)
  |> string.concat
}

/// Converts an NSID to a GraphQL field name (camelCase).
///
/// The first dotted segment keeps its casing; every later segment gets an
/// uppercased first letter.
///
/// ## Examples
///
/// ```gleam
/// to_field_name("xyz.statusphere.status")  // "xyzStatusphereStatus"
/// to_field_name("app.bsky.feed.post")      // "appBskyFeedPost"
/// ```
pub fn to_field_name(nsid: String) -> String {
  case string.split(nsid, ".") {
    [head, ..tail] -> string.concat([head, ..list.map(tail, capitalize_first)])
    [] -> nsid
  }
}

/// Extracts the collection name from an NSID (last segment).
///
/// ## Examples
///
/// ```gleam
/// to_collection_name("xyz.statusphere.status")  // "status"
/// to_collection_name("app.bsky.feed.post")      // "post"
/// ```
pub fn to_collection_name(nsid: String) -> String {
  case list.last(string.split(nsid, ".")) {
    Ok(segment) -> segment
    Error(Nil) -> ""
  }
}

/// Uppercases the first grapheme of a string; empty input is unchanged.
fn capitalize_first(word: String) -> String {
  string.pop_grapheme(word)
  |> result.map(fn(parts) { string.uppercase(parts.0) <> parts.1 })
  |> result.unwrap(word)
}
+55
lexicon_graphql/src/lexicon_graphql/ref_resolver.gleam
/// Lexicon Reference Resolver
///
/// Resolves ref types in lexicon definitions to their actual types.
/// Handles both local references (within the same lexicon) and external
/// references to other lexicons.
///
/// Reference URI format:
/// - "nsid" - references the main definition of that NSID
/// - "nsid#fragment" - references a specific definition within that NSID
import gleam/list
import gleam/string
import lexicon_graphql/schema_builder

/// Parse a reference URI into its NSID and definition-name parts.
///
/// ## Examples
///
/// ```gleam
/// parse_ref_uri("xyz.statusphere.profile")
/// // #("xyz.statusphere.profile", "main")
///
/// parse_ref_uri("xyz.statusphere.post#embed")
/// // #("xyz.statusphere.post", "embed")
/// ```
pub fn parse_ref_uri(ref_uri: String) -> #(String, String) {
  let segments = string.split(ref_uri, "#")
  case segments {
    [nsid, fragment] -> #(nsid, fragment)
    [nsid] -> #(nsid, "main")
    // Malformed (multiple "#"): fall back to treating the whole URI as
    // the NSID with the default "main" definition.
    _ -> #(ref_uri, "main")
  }
}

/// Resolves a reference URI to the actual lexicon definition.
///
/// Returns the NSID of the referenced type if a lexicon with that id is
/// present in `lexicons`, or an Error describing the missing reference.
/// Only existence is checked; the fragment is not resolved further.
pub fn resolve_ref(
  ref_uri: String,
  lexicons: List(schema_builder.Lexicon),
) -> Result(String, String) {
  let #(nsid, _fragment) = parse_ref_uri(ref_uri)

  case list.find(lexicons, fn(lexicon) { lexicon.id == nsid }) {
    Ok(_) -> Ok(nsid)
    Error(Nil) -> Error("Reference not found: " <> ref_uri)
  }
}
+150
lexicon_graphql/src/lexicon_graphql/schema_builder.gleam
/// Schema Builder
///
/// Builds GraphQL schemas from AT Protocol lexicon definitions.
/// Simplified MVP version - handles basic record types only.
/// Resolvers here return hard-coded placeholder data; database-backed
/// resolution lives in lexicon_graphql/db_schema_builder.
import gleam/list
import gleam/option.{None}
import graphql/schema
import graphql/value
import lexicon_graphql/nsid
import lexicon_graphql/type_mapper

/// Lexicon definition structure (simplified)
pub type Lexicon {
  Lexicon(id: String, defs: Defs)
}

/// Lexicon definitions container.
/// Only the "main" definition is modeled; other defs are not represented.
pub type Defs {
  Defs(main: RecordDef)
}

/// Record definition: the def's "type" string plus its named properties.
pub type RecordDef {
  RecordDef(type_: String, properties: List(#(String, Property)))
}

/// Property definition: the lexicon type string and whether the property
/// is listed in the lexicon's "required" array.
pub type Property {
  Property(type_: String, required: Bool)
}

/// Record type metadata (internal): derived GraphQL names and fields for
/// one record lexicon.
type RecordType {
  RecordType(
    nsid: String,
    type_name: String,
    field_name: String,
    fields: List(schema.Field),
  )
}

/// Builds a GraphQL schema from a list of lexicon definitions.
///
/// Returns a Schema that can be used for query execution, or an Error for
/// an empty lexicon list. Non-record lexicons are silently skipped.
pub fn build_schema(lexicons: List(Lexicon)) -> Result(schema.Schema, String) {
  case lexicons {
    [] -> Error("Cannot build schema from empty lexicon list")
    _ -> {
      // Extract record types from lexicons
      let record_types = extract_record_types(lexicons)

      // Build the query type with fields for each record
      let query_type = build_query_type(record_types)

      // Create the schema (no mutation type)
      Ok(schema.schema(query_type, None))
    }
  }
}

/// Extract record types from lexicon definitions.
/// Lexicons whose main def is not a "record" are dropped.
fn extract_record_types(lexicons: List(Lexicon)) -> List(RecordType) {
  lexicons
  |> list.filter_map(parse_lexicon)
}

/// Parse a single lexicon into a RecordType.
/// Derives the PascalCase type name and camelCase field name from the NSID.
fn parse_lexicon(lexicon: Lexicon) -> Result(RecordType, Nil) {
  case lexicon {
    Lexicon(id, Defs(RecordDef("record", properties))) -> {
      let type_name = nsid.to_type_name(id)
      let field_name = nsid.to_field_name(id)
      let fields = build_fields(properties)

      Ok(RecordType(
        nsid: id,
        type_name: type_name,
        field_name: field_name,
        fields: fields,
      ))
    }
    _ -> Error(Nil)
  }
}

/// Build GraphQL fields from lexicon properties.
///
/// NOTE: this MVP version uses placeholder resolvers — the standard fields
/// return fixed sample values and lexicon-defined fields always resolve to
/// Null. The `required` flag is not yet mapped to GraphQL non-null types.
fn build_fields(properties: List(#(String, Property))) -> List(schema.Field) {
  // Add standard AT Proto fields (placeholder values only)
  let standard_fields = [
    schema.field("uri", schema.string_type(), "Record URI", fn(_ctx) {
      Ok(value.String("at://did:plc:example/collection/rkey"))
    }),
    schema.field("cid", schema.string_type(), "Record CID", fn(_ctx) {
      Ok(value.String("bafyreicid"))
    }),
    schema.field("did", schema.string_type(), "DID of record author", fn(_ctx) {
      Ok(value.String("did:plc:example"))
    }),
    schema.field(
      "indexedAt",
      schema.string_type(),
      "When record was indexed",
      fn(_ctx) { Ok(value.String("2024-01-01T00:00:00Z")) },
    ),
  ]

  // Build fields from lexicon properties
  let lexicon_fields =
    list.map(properties, fn(prop) {
      let #(name, Property(type_, _required)) = prop
      let graphql_type = type_mapper.map_type(type_)

      schema.field(name, graphql_type, "Field from lexicon", fn(_ctx) {
        Ok(value.Null)
      })
    })

  // Combine standard and lexicon fields
  list.append(standard_fields, lexicon_fields)
}

/// Build the root Query type with fields for each record type
fn build_query_type(record_types: List(RecordType)) -> schema.Type {
  let query_fields =
    list.map(record_types, fn(record_type) {
      // Build the object type for this record
      let object_type =
        schema.object_type(
          record_type.type_name,
          "Record type: " <> record_type.nsid,
          record_type.fields,
        )

      // Create a list type for the query field
      let list_type = schema.list_type(object_type)

      // Create query field that returns a list of this record type
      schema.field(
        record_type.field_name,
        list_type,
        "Query " <> record_type.nsid,
        fn(_ctx) {
          // For now, return empty list - database resolver will be added later
          Ok(value.List([]))
        },
      )
    })

  schema.object_type("Query", "Root query type", query_fields)
}
+39
lexicon_graphql/src/lexicon_graphql/type_mapper.gleam
/// Lexicon Type Mapper
///
/// Maps AT Protocol lexicon types to GraphQL types.
/// Simplified MVP version - handles basic types only.
///
/// Based on the Elixir implementation but adapted for the pure Gleam
/// GraphQL library.
import graphql/schema

/// Maps a lexicon type string to a GraphQL Type.
///
/// Only the numeric and boolean primitives get a dedicated GraphQL scalar;
/// everything else is represented as a GraphQL String in this MVP. That
/// includes "string" itself, binary/blob types ("blob", "bytes",
/// "cid-link" — base64 or URL), complex types ("unknown", "ref", "union"),
/// and any unrecognised type.
///
/// ## Examples
///
/// ```gleam
/// map_type("string")   // schema.string_type()
/// map_type("integer")  // schema.int_type()
/// map_type("boolean")  // schema.boolean_type()
/// ```
pub fn map_type(lexicon_type: String) -> schema.Type {
  case lexicon_type {
    "integer" -> schema.int_type()
    "boolean" -> schema.boolean_type()
    "number" -> schema.float_type()
    // "string", blob/bytes/cid-link, unknown/ref/union, and any other
    // type all map to the String scalar.
    _ -> schema.string_type()
  }
}
+102
lexicon_graphql/test/lexicon_graphql/lexicon_parser_test.gleam
/// Tests for Lexicon JSON Parser
///
/// Parses AT Protocol lexicon JSON into structured Lexicon types
import gleam/list
import gleeunit/should
import lexicon_graphql/lexicon_parser
import lexicon_graphql/schema_builder

// Test parsing a simple record lexicon
pub fn parse_simple_record_lexicon_test() {
  let json =
    "{
    \"lexicon\": 1,
    \"id\": \"xyz.statusphere.status\",
    \"defs\": {
      \"main\": {
        \"type\": \"record\",
        \"record\": {
          \"type\": \"object\",
          \"required\": [\"text\"],
          \"properties\": {
            \"text\": {\"type\": \"string\"},
            \"createdAt\": {\"type\": \"string\"}
          }
        }
      }
    }
  }"

  let result = lexicon_parser.parse_lexicon(json)

  should.be_ok(result)

  // Verify the parsed lexicon has correct structure
  case result {
    Ok(lexicon) -> {
      should.equal(lexicon.id, "xyz.statusphere.status")
      // Verify it has properties
      case lexicon.defs.main {
        schema_builder.RecordDef(type_: "record", properties: props) -> {
          // Should have at least text and createdAt properties.
          // (>= rather than == because property order comes from a dict.)
          should.be_true(list.length(props) >= 2)
        }
        schema_builder.RecordDef(type_: _, properties: _) -> {
          // Any non-"record" type means the parse went wrong
          should.fail()
        }
      }
    }
    Error(_) -> should.fail()
  }
}

// Test parsing lexicon with no required fields
// (the "required" array is optional and should default to empty)
pub fn parse_lexicon_with_optional_fields_test() {
  let json =
    "{
    \"lexicon\": 1,
    \"id\": \"xyz.statusphere.profile\",
    \"defs\": {
      \"main\": {
        \"type\": \"record\",
        \"record\": {
          \"type\": \"object\",
          \"properties\": {
            \"displayName\": {\"type\": \"string\"},
            \"bio\": {\"type\": \"string\"}
          }
        }
      }
    }
  }"

  let result = lexicon_parser.parse_lexicon(json)

  should.be_ok(result)
}

// Test parsing invalid JSON — malformed input must yield an Error
pub fn parse_invalid_json_test() {
  let json = "{invalid json"

  let result = lexicon_parser.parse_lexicon(json)

  should.be_error(result)
}

// Test parsing JSON with missing required fields — a document without an
// "id" (and without a "record" object) must fail to decode
pub fn parse_lexicon_missing_id_test() {
  let json =
    "{
    \"lexicon\": 1,
    \"defs\": {
      \"main\": {
        \"type\": \"record\"
      }
    }
  }"

  let result = lexicon_parser.parse_lexicon(json)

  should.be_error(result)
}
+39
lexicon_graphql/test/lexicon_graphql/nsid_test.gleam
/// Tests for NSID (Namespaced Identifier) utilities
///
/// NSIDs are used in AT Protocol to identify lexicons and collections
import gleeunit/should
import lexicon_graphql/nsid

pub fn nsid_to_type_name_test() {
  // Every dotted segment is capitalized and segments are concatenated
  should.equal(
    nsid.to_type_name("xyz.statusphere.status"),
    "XyzStatusphereStatus",
  )
  should.equal(nsid.to_type_name("app.bsky.feed.post"), "AppBskyFeedPost")
  should.equal(
    nsid.to_type_name("com.atproto.repo.createRecord"),
    "ComAtprotoRepoCreateRecord",
  )
}

pub fn nsid_to_field_name_test() {
  // Same as the type name but the first segment keeps its casing
  should.equal(
    nsid.to_field_name("xyz.statusphere.status"),
    "xyzStatusphereStatus",
  )
  should.equal(nsid.to_field_name("app.bsky.feed.post"), "appBskyFeedPost")
  should.equal(
    nsid.to_field_name("com.atproto.repo.createRecord"),
    "comAtprotoRepoCreateRecord",
  )
}

pub fn nsid_to_collection_name_test() {
  // Collection names are just the last part of the NSID
  should.equal(nsid.to_collection_name("xyz.statusphere.status"), "status")
  should.equal(nsid.to_collection_name("app.bsky.feed.post"), "post")
  should.equal(
    nsid.to_collection_name("com.atproto.repo.createRecord"),
    "createRecord",
  )
}
+89
lexicon_graphql/test/lexicon_graphql/ref_resolver_test.gleam
/// Tests for Lexicon Reference Resolver
///
/// Resolves ref types in lexicon definitions to their actual types
import gleeunit/should
import lexicon_graphql/ref_resolver
import lexicon_graphql/schema_builder

/// Builds a record-lexicon fixture with the given NSID and properties.
fn record_lexicon(
  id: String,
  properties: List(#(String, schema_builder.Property)),
) -> schema_builder.Lexicon {
  schema_builder.Lexicon(
    id: id,
    defs: schema_builder.Defs(
      main: schema_builder.RecordDef(type_: "record", properties: properties),
    ),
  )
}

// Test resolving a local reference (within same lexicon)
pub fn resolve_local_ref_test() {
  // Lexicon with a main record that references another type in the same lexicon
  let lexicon =
    record_lexicon("xyz.statusphere.post", [
      #("text", schema_builder.Property("string", True)),
      #("embed", schema_builder.Property("ref", False)),
    ])

  // Reference should resolve to a type within the same lexicon
  ref_resolver.resolve_ref("xyz.statusphere.post#embed", [lexicon])
  |> should.be_ok
}

// Test resolving an external reference (different lexicon)
pub fn resolve_external_ref_test() {
  let post_lexicon =
    record_lexicon("xyz.statusphere.post", [
      #("text", schema_builder.Property("string", True)),
      #("author", schema_builder.Property("ref", False)),
    ])

  let profile_lexicon =
    record_lexicon("xyz.statusphere.profile", [
      #("displayName", schema_builder.Property("string", True)),
    ])

  // Reference to different lexicon
  ref_resolver.resolve_ref("xyz.statusphere.profile", [
    post_lexicon,
    profile_lexicon,
  ])
  |> should.be_ok
}

// Test error when reference not found
pub fn resolve_nonexistent_ref_test() {
  let lexicon =
    record_lexicon("xyz.statusphere.post", [
      #("text", schema_builder.Property("string", True)),
    ])

  // Try to resolve a reference that doesn't exist
  ref_resolver.resolve_ref("xyz.statusphere.nonexistent", [lexicon])
  |> should.be_error
}

// Test parsing ref URI format
pub fn parse_ref_uri_test() {
  // A bare NSID resolves to the "main" definition
  should.equal(
    ref_resolver.parse_ref_uri("xyz.statusphere.profile"),
    #("xyz.statusphere.profile", "main"),
  )

  // An NSID with a fragment names a specific definition
  should.equal(
    ref_resolver.parse_ref_uri("xyz.statusphere.post#embed"),
    #("xyz.statusphere.post", "embed"),
  )
}
+79
lexicon_graphql/test/lexicon_graphql/schema_builder_test.gleam
/// Tests for Schema Builder
///
/// Builds GraphQL schemas from AT Protocol lexicon definitions
import gleeunit/should
import lexicon_graphql/schema_builder

// Build a single-record lexicon fixture from an id and a property list.
fn make_lexicon(
  id: String,
  properties: List(#(String, schema_builder.Property)),
) -> schema_builder.Lexicon {
  schema_builder.Lexicon(
    id: id,
    defs: schema_builder.Defs(
      main: schema_builder.RecordDef(type_: "record", properties: properties),
    ),
  )
}

// A single lexicon with a couple of string fields builds successfully.
pub fn build_simple_schema_test() {
  let status =
    make_lexicon("xyz.statusphere.status", [
      #("text", schema_builder.Property("string", False)),
      #("createdAt", schema_builder.Property("string", True)),
    ])

  schema_builder.build_schema([status])
  |> should.be_ok
}

// Several lexicons can be combined into one schema.
pub fn build_schema_with_multiple_lexicons_test() {
  let status =
    make_lexicon("xyz.statusphere.status", [
      #("text", schema_builder.Property("string", False)),
    ])

  let profile =
    make_lexicon("xyz.statusphere.profile", [
      #("displayName", schema_builder.Property("string", False)),
    ])

  schema_builder.build_schema([status, profile])
  |> should.be_ok
}

// NSIDs become schema names: type PascalCase (AppBskyFeedPost),
// field camelCase (appBskyFeedPost).
pub fn schema_has_correct_type_names_test() {
  let post =
    make_lexicon("app.bsky.feed.post", [
      #("text", schema_builder.Property("string", True)),
    ])

  schema_builder.build_schema([post])
  |> should.be_ok
}

// An empty lexicon list is rejected rather than producing an empty schema.
pub fn build_schema_with_empty_list_test() {
  schema_builder.build_schema([])
  |> should.be_error
}
+69
lexicon_graphql/test/lexicon_graphql/type_mapper_test.gleam
/// Tests for Lexicon Type Mapper
///
/// Maps AT Protocol lexicon types to GraphQL types
import gleeunit/should
import graphql/schema
import lexicon_graphql/type_mapper

pub fn map_string_type_test() {
  let mapped = type_mapper.map_type("string")
  should.equal(mapped, schema.string_type())
}

pub fn map_integer_type_test() {
  let mapped = type_mapper.map_type("integer")
  should.equal(mapped, schema.int_type())
}

pub fn map_boolean_type_test() {
  let mapped = type_mapper.map_type("boolean")
  should.equal(mapped, schema.boolean_type())
}

pub fn map_number_type_test() {
  // Lexicon "number" is floating point, so it becomes GraphQL Float.
  let mapped = type_mapper.map_type("number")
  should.equal(mapped, schema.float_type())
}

pub fn map_unknown_type_test() {
  // "unknown" carries arbitrary JSON, exposed as a String.
  let mapped = type_mapper.map_type("unknown")
  should.equal(mapped, schema.string_type())
}

pub fn map_blob_type_test() {
  // Blobs are exposed as a String (URL or base64).
  let mapped = type_mapper.map_type("blob")
  should.equal(mapped, schema.string_type())
}

pub fn map_bytes_type_test() {
  // Raw bytes are exposed as a base64-encoded String.
  let mapped = type_mapper.map_type("bytes")
  should.equal(mapped, schema.string_type())
}

pub fn map_cid_link_type_test() {
  // CID links are exposed as a String.
  let mapped = type_mapper.map_type("cid-link")
  should.equal(mapped, schema.string_type())
}

pub fn map_ref_type_test() {
  // Refs are currently exposed as a String holding the URI reference.
  let mapped = type_mapper.map_type("ref")
  should.equal(mapped, schema.string_type())
}

pub fn map_union_type_test() {
  // Unions are currently exposed as a String.
  let mapped = type_mapper.map_type("union")
  should.equal(mapped, schema.string_type())
}

pub fn map_default_fallback_test() {
  // Anything unrecognised falls back to String.
  let mapped = type_mapper.map_type("somethingWeird")
  should.equal(mapped, schema.string_type())
}
+13
lexicon_graphql/test/lexicon_graphql_test.gleam
import gleeunit

/// Test-suite entry point: discovers and runs every `*_test` function.
pub fn main() -> Nil {
  gleeunit.main()
}

// Smoke test proving the test harness itself is wired up.
pub fn hello_world_test() {
  let who = "Joe"
  let greeting = "Hello, " <> who <> "!"

  assert greeting == "Hello, Joe!"
}
+111
scripts/create_record_with_token.sh
#!/bin/bash

# Create Record Script - Takes access token as argument
# Usage: ./create_record_with_token.sh <ACCESS_TOKEN>
#
# Resolves the caller's ATProto session from the AIP server, prompts for a
# status emoji, and POSTs a xyz.statusphere.status record to the local API.

set -e

LOCALHOST_API="http://localhost:8000"

# Require the access token as the first positional argument.
if [ -z "$1" ]; then
  echo "❌ Access token required"
  echo ""
  echo "Usage: $0 <ACCESS_TOKEN>"
  echo ""
  echo "Get your access token from the AIP server first, then run:"
  echo "  $0 YOUR_ACCESS_TOKEN"
  exit 1
fi

ACCESS_TOKEN="$1"

echo "🔐 QuickSlice Record Creation Tool"
echo "Using access token: ${ACCESS_TOKEN:0:20}..."
echo ""

# Get ATProto session (DID, handle, PDS endpoint) from AIP
echo "🔍 Getting ATProto session from AIP..."
AIP_BASE_URL="${AIP_BASE_URL:-https://tunnel.chadtmiller.com}"
SESSION_RESPONSE=$(curl -s "$AIP_BASE_URL/api/atprotocol/session" \
  -H "Authorization: Bearer $ACCESS_TOKEN")

echo "📋 Full ATProto Session Response:"
echo "$SESSION_RESPONSE" | jq '.'
echo ""

DID=$(echo "$SESSION_RESPONSE" | jq -r '.did // empty')
HANDLE=$(echo "$SESSION_RESPONSE" | jq -r '.handle // empty')
PDS_ENDPOINT=$(echo "$SESSION_RESPONSE" | jq -r '.pds_endpoint // empty')

if [ -z "$DID" ]; then
  echo "❌ Failed to get ATProto session"
  echo "Response: $SESSION_RESPONSE"
  exit 1
fi

echo "✅ ATProto session retrieved!"
echo "   DID: $DID"
echo "   Handle: $HANDLE"
echo "   PDS: $PDS_ENDPOINT"
echo ""

# Create a status record
echo "📝 Creating status record..."

# Prompt for status emoji
read -p "Enter status emoji (single emoji, or press Enter for default): " STATUS_EMOJI
if [ -z "$STATUS_EMOJI" ]; then
  STATUS_EMOJI="✨"
fi

# Build the record JSON with jq so all interpolated values are escaped.
# (A raw heredoc would break — or allow payload injection — if the emoji
# input or a server-returned value contained a quote or backslash.)
RECORD_JSON=$(jq -n \
  --arg repo "$DID" \
  --arg status "$STATUS_EMOJI" \
  --arg createdAt "$(date -u +"%Y-%m-%dT%H:%M:%SZ")" \
  '{
    repo: $repo,
    collection: "xyz.statusphere.status",
    record: { status: $status, createdAt: $createdAt }
  }')

# Make the request to localhost:8000
echo "🚀 Sending request to $LOCALHOST_API..."
echo "📤 Request body:"
echo "$RECORD_JSON" | jq '.'
echo ""

CREATE_RESPONSE=$(curl -s -w "\n%{http_code}" -X POST \
  "$LOCALHOST_API/xrpc/xyz.statusphere.status.createRecord" \
  -H "Authorization: Bearer $ACCESS_TOKEN" \
  -H "Content-Type: application/json" \
  -d "$RECORD_JSON")

# Extract status code (last line) and body (everything else)
HTTP_CODE=$(echo "$CREATE_RESPONSE" | tail -n 1)
RESPONSE_BODY=$(echo "$CREATE_RESPONSE" | sed '$d')

if [ "$HTTP_CODE" = "200" ] || [ "$HTTP_CODE" = "201" ]; then
  echo "✅ Record created successfully!"
  echo ""
  echo "Response:"
  echo "$RESPONSE_BODY" | jq '.'
  echo ""

  RECORD_URI=$(echo "$RESPONSE_BODY" | jq -r '.uri // empty')
  if [ -n "$RECORD_URI" ]; then
    echo "📍 Record URI: $RECORD_URI"
  fi
else
  echo "❌ Failed to create record (HTTP $HTTP_CODE)"
  echo ""
  echo "Response:"
  echo "$RESPONSE_BODY" | jq '.' 2>/dev/null || echo "$RESPONSE_BODY"
  exit 1
fi

echo ""
echo "🎉 Done!"
+6
server/.gitignore
··· 1 + *.beam 2 + *.ez 3 + /build 4 + erl_crash.dump 5 + *.db* 6 + .env
+24
server/README.md
# server

[![Package Version](https://img.shields.io/hexpm/v/server)](https://hex.pm/packages/server)
[![Hex Docs](https://img.shields.io/badge/hex-docs-ffaff3)](https://hexdocs.pm/server/)

```sh
gleam add server@1
```
```gleam
import server

pub fn main() -> Nil {
  // TODO: An example of the project in use
}
```

Further documentation can be found at <https://hexdocs.pm/server>.

## Development

```sh
gleam run   # Run the project
gleam test  # Run the tests
```
+39
server/gleam.toml
··· 1 + name = "server" 2 + version = "1.0.0" 3 + 4 + # Fill out these fields if you intend to generate HTML documentation or publish 5 + # your project to the Hex package manager. 6 + # 7 + # description = "" 8 + # licences = ["Apache-2.0"] 9 + # repository = { type = "github", user = "", repo = "" } 10 + # links = [{ title = "Website", href = "" }] 11 + # 12 + # For a full reference of all the available options, you can have a look at 13 + # https://gleam.run/writing-gleam/gleam-toml/. 14 + 15 + [dependencies] 16 + jetstream = { path = "../jetstream" } 17 + lexicon = { path = "../lexicon" } 18 + graphql = { path = "../graphql" } 19 + lexicon_graphql = { path = "../lexicon_graphql" } 20 + gleam_stdlib = ">= 0.60.0 and < 1.0.0" 21 + mist = ">= 5.0.3 and < 6.0.0" 22 + wisp = ">= 2.1.0 and < 3.0.0" 23 + gleam_erlang = ">= 1.0.0 and < 2.0.0" 24 + gleam_otp = ">= 1.2.0 and < 2.0.0" 25 + gleam_http = ">= 4.0.0 and < 5.0.0" 26 + gleam_json = ">= 3.0.2 and < 4.0.0" 27 + gleam_httpc = ">= 5.0.0 and < 6.0.0" 28 + sqlight = ">= 1.0.0 and < 2.0.0" 29 + gleam_time = ">= 1.4.0 and < 2.0.0" 30 + lustre = ">= 5.0.0 and < 6.0.0" 31 + simplifile = ">= 2.0.0 and < 3.0.0" 32 + argv = ">= 1.0.0 and < 2.0.0" 33 + jose = ">= 1.11.10 and < 2.0.0" 34 + envoy = ">= 1.0.2 and < 2.0.0" 35 + dotenv_gleam = ">= 2.0.1 and < 3.0.0" 36 + thoas = ">= 1.0.0 and < 2.0.0" 37 + 38 + [dev-dependencies] 39 + gleeunit = ">= 1.0.0 and < 2.0.0"
+67
server/manifest.toml
··· 1 + # This file was generated by Gleam 2 + # You typically do not need to edit this file 3 + 4 + packages = [ 5 + { name = "argv", version = "1.0.2", build_tools = ["gleam"], requirements = [], otp_app = "argv", source = "hex", outer_checksum = "BA1FF0929525DEBA1CE67256E5ADF77A7CDDFE729E3E3F57A5BDCAA031DED09D" }, 6 + { name = "cowlib", version = "2.16.0", build_tools = ["make", "rebar3"], requirements = [], otp_app = "cowlib", source = "hex", outer_checksum = "7F478D80D66B747344F0EA7708C187645CFCC08B11AA424632F78E25BF05DB51" }, 7 + { name = "directories", version = "1.2.0", build_tools = ["gleam"], requirements = ["envoy", "gleam_stdlib", "platform", "simplifile"], otp_app = "directories", source = "hex", outer_checksum = "D13090CFCDF6759B87217E8DDD73A75903A700148A82C1D33799F333E249BF9E" }, 8 + { name = "dotenv_gleam", version = "2.0.1", build_tools = ["gleam"], requirements = ["envoy", "gleam_erlang", "gleam_stdlib", "simplifile"], otp_app = "dotenv_gleam", source = "hex", outer_checksum = "47391525F97AF2086B34A4F2E81C1A1102863ACA983540CD87A8D295B1636445" }, 9 + { name = "envoy", version = "1.0.2", build_tools = ["gleam"], requirements = ["gleam_stdlib"], otp_app = "envoy", source = "hex", outer_checksum = "95FD059345AA982E89A0B6E2A3BF1CF43E17A7048DCD85B5B65D3B9E4E39D359" }, 10 + { name = "esqlite", version = "0.9.0", build_tools = ["rebar3"], requirements = [], otp_app = "esqlite", source = "hex", outer_checksum = "CCF72258A4EE152EC7AD92AA9A03552EB6CA1B06B65C93AD5B6E55C302E05855" }, 11 + { name = "exception", version = "2.1.0", build_tools = ["gleam"], requirements = ["gleam_stdlib"], otp_app = "exception", source = "hex", outer_checksum = "329D269D5C2A314F7364BD2711372B6F2C58FA6F39981572E5CA68624D291F8C" }, 12 + { name = "filepath", version = "1.1.2", build_tools = ["gleam"], requirements = ["gleam_stdlib"], otp_app = "filepath", source = "hex", outer_checksum = "B06A9AF0BF10E51401D64B98E4B627F1D2E48C154967DA7AF4D0914780A6D40A" }, 13 + { name = 
"gleam_crypto", version = "1.5.1", build_tools = ["gleam"], requirements = ["gleam_stdlib"], otp_app = "gleam_crypto", source = "hex", outer_checksum = "50774BAFFF1144E7872814C566C5D653D83A3EBF23ACC3156B757A1B6819086E" }, 14 + { name = "gleam_erlang", version = "1.3.0", build_tools = ["gleam"], requirements = ["gleam_stdlib"], otp_app = "gleam_erlang", source = "hex", outer_checksum = "1124AD3AA21143E5AF0FC5CF3D9529F6DB8CA03E43A55711B60B6B7B3874375C" }, 15 + { name = "gleam_http", version = "4.3.0", build_tools = ["gleam"], requirements = ["gleam_stdlib"], otp_app = "gleam_http", source = "hex", outer_checksum = "82EA6A717C842456188C190AFB372665EA56CE13D8559BF3B1DD9E40F619EE0C" }, 16 + { name = "gleam_httpc", version = "5.0.0", build_tools = ["gleam"], requirements = ["gleam_erlang", "gleam_http", "gleam_stdlib"], otp_app = "gleam_httpc", source = "hex", outer_checksum = "C545172618D07811494E97AAA4A0FB34DA6F6D0061FDC8041C2F8E3BE2B2E48F" }, 17 + { name = "gleam_json", version = "3.0.2", build_tools = ["gleam"], requirements = ["gleam_stdlib"], otp_app = "gleam_json", source = "hex", outer_checksum = "874FA3C3BB6E22DD2BB111966BD40B3759E9094E05257899A7C08F5DE77EC049" }, 18 + { name = "gleam_otp", version = "1.2.0", build_tools = ["gleam"], requirements = ["gleam_erlang", "gleam_stdlib"], otp_app = "gleam_otp", source = "hex", outer_checksum = "BA6A294E295E428EC1562DC1C11EA7530DCB981E8359134BEABC8493B7B2258E" }, 19 + { name = "gleam_stdlib", version = "0.65.0", build_tools = ["gleam"], requirements = [], otp_app = "gleam_stdlib", source = "hex", outer_checksum = "7C69C71D8C493AE11A5184828A77110EB05A7786EBF8B25B36A72F879C3EE107" }, 20 + { name = "gleam_time", version = "1.4.0", build_tools = ["gleam"], requirements = ["gleam_stdlib"], otp_app = "gleam_time", source = "hex", outer_checksum = "DCDDC040CE97DA3D2A925CDBBA08D8A78681139745754A83998641C8A3F6587E" }, 21 + { name = "gleam_yielder", version = "1.1.0", build_tools = ["gleam"], requirements = ["gleam_stdlib"], 
otp_app = "gleam_yielder", source = "hex", outer_checksum = "8E4E4ECFA7982859F430C57F549200C7749823C106759F4A19A78AEA6687717A" }, 22 + { name = "gleeunit", version = "1.6.1", build_tools = ["gleam"], requirements = ["gleam_stdlib"], otp_app = "gleeunit", source = "hex", outer_checksum = "FDC68A8C492B1E9B429249062CD9BAC9B5538C6FBF584817205D0998C42E1DAC" }, 23 + { name = "glisten", version = "8.0.1", build_tools = ["gleam"], requirements = ["gleam_erlang", "gleam_otp", "gleam_stdlib", "logging", "telemetry"], otp_app = "glisten", source = "hex", outer_checksum = "534BB27C71FB9E506345A767C0D76B17A9E9199934340C975DC003C710E3692D" }, 24 + { name = "gramps", version = "6.0.0", build_tools = ["gleam"], requirements = ["gleam_crypto", "gleam_erlang", "gleam_http", "gleam_stdlib"], otp_app = "gramps", source = "hex", outer_checksum = "8B7195978FBFD30B43DF791A8A272041B81E45D245314D7A41FC57237AA882A0" }, 25 + { name = "graphql", version = "1.0.0", build_tools = ["gleam"], requirements = ["gleam_stdlib"], source = "local", path = "../graphql" }, 26 + { name = "gun", version = "2.2.0", build_tools = ["make", "rebar3"], requirements = ["cowlib"], otp_app = "gun", source = "hex", outer_checksum = "76022700C64287FEB4DF93A1795CFF6741B83FB37415C40C34C38D2A4645261A" }, 27 + { name = "houdini", version = "1.2.0", build_tools = ["gleam"], requirements = [], otp_app = "houdini", source = "hex", outer_checksum = "5DB1053F1AF828049C2B206D4403C18970ABEF5C18671CA3C2D2ED0DD64F6385" }, 28 + { name = "hpack_erl", version = "0.3.0", build_tools = ["rebar3"], requirements = [], otp_app = "hpack", source = "hex", outer_checksum = "D6137D7079169D8C485C6962DFE261AF5B9EF60FBC557344511C1E65E3D95FB0" }, 29 + { name = "jetstream", version = "1.0.0", build_tools = ["gleam"], requirements = ["gleam_erlang", "gleam_http", "gleam_json", "gleam_stdlib", "gun"], source = "local", path = "../jetstream" }, 30 + { name = "jose", version = "1.11.10", build_tools = ["mix", "rebar3"], requirements = [], otp_app = 
"jose", source = "hex", outer_checksum = "0D6CD36FF8BA174DB29148FC112B5842186B68A90CE9FC2B3EC3AFE76593E614" }, 31 + { name = "lexicon", version = "0.1.0", build_tools = ["gleam"], requirements = ["gleam_json", "gleam_stdlib"], source = "local", path = "../lexicon" }, 32 + { name = "lexicon_graphql", version = "1.0.0", build_tools = ["gleam"], requirements = ["gleam_json", "gleam_stdlib", "graphql"], source = "local", path = "../lexicon_graphql" }, 33 + { name = "logging", version = "1.3.0", build_tools = ["gleam"], requirements = ["gleam_stdlib"], otp_app = "logging", source = "hex", outer_checksum = "1098FBF10B54B44C2C7FDF0B01C1253CAFACDACABEFB4B0D027803246753E06D" }, 34 + { name = "lustre", version = "5.3.5", build_tools = ["gleam"], requirements = ["gleam_erlang", "gleam_json", "gleam_otp", "gleam_stdlib", "houdini"], otp_app = "lustre", source = "hex", outer_checksum = "5CBB5DD2849D8316A2101792FC35AEB58CE4B151451044A9C2A2A70A2F7FCEB8" }, 35 + { name = "marceau", version = "1.3.0", build_tools = ["gleam"], requirements = [], otp_app = "marceau", source = "hex", outer_checksum = "2D1C27504BEF45005F5DFB18591F8610FB4BFA91744878210BDC464412EC44E9" }, 36 + { name = "mist", version = "5.0.3", build_tools = ["gleam"], requirements = ["exception", "gleam_erlang", "gleam_http", "gleam_otp", "gleam_stdlib", "gleam_yielder", "glisten", "gramps", "hpack_erl", "logging"], otp_app = "mist", source = "hex", outer_checksum = "7C4BE717A81305323C47C8A591E6B9BA4AC7F56354BF70B4D3DF08CC01192668" }, 37 + { name = "platform", version = "1.0.0", build_tools = ["gleam"], requirements = [], otp_app = "platform", source = "hex", outer_checksum = "8339420A95AD89AAC0F82F4C3DB8DD401041742D6C3F46132A8739F6AEB75391" }, 38 + { name = "simplifile", version = "2.3.0", build_tools = ["gleam"], requirements = ["filepath", "gleam_stdlib"], otp_app = "simplifile", source = "hex", outer_checksum = "0A868DAC6063D9E983477981839810DC2E553285AB4588B87E3E9C96A7FB4CB4" }, 39 + { name = "sqlight", version = 
"1.0.3", build_tools = ["gleam"], requirements = ["esqlite", "gleam_stdlib"], otp_app = "sqlight", source = "hex", outer_checksum = "CADD79663C9B61D4BAC960A47CC2D42CA8F48EAF5804DBEB79977287750F4B16" }, 40 + { name = "telemetry", version = "1.3.0", build_tools = ["rebar3"], requirements = [], otp_app = "telemetry", source = "hex", outer_checksum = "7015FC8919DBE63764F4B4B87A95B7C0996BD539E0D499BE6EC9D7F3875B79E6" }, 41 + { name = "thoas", version = "1.2.1", build_tools = ["rebar3"], requirements = [], otp_app = "thoas", source = "hex", outer_checksum = "E38697EDFFD6E91BD12CEA41B155115282630075C2A727E7A6B2947F5408B86A" }, 42 + { name = "wisp", version = "2.1.0", build_tools = ["gleam"], requirements = ["directories", "exception", "filepath", "gleam_crypto", "gleam_erlang", "gleam_http", "gleam_json", "gleam_stdlib", "houdini", "logging", "marceau", "mist", "simplifile"], otp_app = "wisp", source = "hex", outer_checksum = "362BDDD11BF48EB38CDE51A73BC7D1B89581B395CA998E3F23F11EC026151C54" }, 43 + ] 44 + 45 + [requirements] 46 + argv = { version = ">= 1.0.0 and < 2.0.0" } 47 + dotenv_gleam = { version = ">= 2.0.1 and < 3.0.0" } 48 + envoy = { version = ">= 1.0.2 and < 2.0.0" } 49 + gleam_erlang = { version = ">= 1.0.0 and < 2.0.0" } 50 + gleam_http = { version = ">= 4.0.0 and < 5.0.0" } 51 + gleam_httpc = { version = ">= 5.0.0 and < 6.0.0" } 52 + gleam_json = { version = ">= 3.0.2 and < 4.0.0" } 53 + gleam_otp = { version = ">= 1.2.0 and < 2.0.0" } 54 + gleam_stdlib = { version = ">= 0.60.0 and < 1.0.0" } 55 + gleam_time = { version = ">= 1.4.0 and < 2.0.0" } 56 + gleeunit = { version = ">= 1.0.0 and < 2.0.0" } 57 + graphql = { path = "../graphql" } 58 + jetstream = { path = "../jetstream" } 59 + jose = { version = ">= 1.11.10 and < 2.0.0" } 60 + lexicon = { path = "../lexicon" } 61 + lexicon_graphql = { path = "../lexicon_graphql" } 62 + lustre = { version = ">= 5.0.0 and < 6.0.0" } 63 + mist = { version = ">= 5.0.3 and < 6.0.0" } 64 + simplifile = { version = ">= 
2.0.0 and < 3.0.0" } 65 + sqlight = { version = ">= 1.0.0 and < 2.0.0" } 66 + thoas = { version = ">= 1.0.0 and < 2.0.0" } 67 + wisp = { version = ">= 2.1.0 and < 3.0.0" }
+23
server/priv/lexicons/xyz/statusphere/status.json
··· 1 + { 2 + "lexicon": 1, 3 + "id": "xyz.statusphere.status", 4 + "defs": { 5 + "main": { 6 + "type": "record", 7 + "key": "tid", 8 + "record": { 9 + "type": "object", 10 + "required": ["status", "createdAt"], 11 + "properties": { 12 + "status": { 13 + "type": "string", 14 + "minLength": 1, 15 + "maxGraphemes": 1, 16 + "maxLength": 32 17 + }, 18 + "createdAt": { "type": "string", "format": "datetime" } 19 + } 20 + } 21 + } 22 + } 23 + }
+230
server/src/atproto_auth.gleam
import gleam/dynamic/decode
import gleam/http/request
import gleam/httpc
import gleam/json
import gleam/list
import gleam/option.{None, Some}
import gleam/result
import gleam/string

/// UserInfo response from OAuth provider
pub type UserInfo {
  UserInfo(sub: String, did: String)
}

/// ATProto session data from AIP
pub type AtprotoSession {
  AtprotoSession(pds_endpoint: String, access_token: String, dpop_jwk: String)
}

/// Error type for authentication operations
pub type AuthError {
  MissingAuthHeader
  InvalidAuthHeader
  UnauthorizedToken
  NetworkError
  ParseError
}

/// Extract bearer token from Authorization header
///
/// Header-name matching is case-insensitive; the "Bearer " scheme prefix
/// must match exactly as written.
///
/// # Example
/// ```gleam
/// extract_bearer_token(request.headers)
/// // Ok("eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9...")
/// ```
pub fn extract_bearer_token(
  from headers: List(#(String, String)),
) -> Result(String, AuthError) {
  headers
  // Standard library find replaces the previous hand-rolled list helper.
  |> list.find(one_that: fn(header) {
    string.lowercase(header.0) == "authorization"
  })
  |> result.map(fn(header) { header.1 })
  |> result.replace_error(MissingAuthHeader)
  |> result.try(fn(auth_value) {
    case string.starts_with(auth_value, "Bearer ") {
      // Drop the 7-character "Bearer " prefix, keeping only the token.
      True -> {
        auth_value
        |> string.drop_start(7)
        |> Ok
      }
      False -> Error(InvalidAuthHeader)
    }
  })
}

/// Verify OAuth token with authorization server
///
/// Makes a request to the `/oauth/userinfo` endpoint to validate the token
/// and retrieve user information. Any transport failure maps to
/// `NetworkError`; any non-200 response maps to `UnauthorizedToken`.
pub fn verify_oauth_token(
  token: String,
  auth_base_url: String,
) -> Result(UserInfo, AuthError) {
  let url = auth_base_url <> "/oauth/userinfo"

  case request.to(url) {
    Error(_) -> Error(NetworkError)
    Ok(req) -> {
      let req =
        req
        |> request.set_header("authorization", "Bearer " <> token)

      case httpc.send(req) {
        Error(_) -> Error(NetworkError)
        Ok(resp) -> {
          case resp.status {
            200 -> parse_userinfo(resp.body)
            _ -> Error(UnauthorizedToken)
          }
        }
      }
    }
  }
}

/// Parse userinfo response JSON
///
/// `sub` is required; `did` is optional and defaults to "" when absent.
/// Any decode failure maps to `ParseError`.
fn parse_userinfo(body: String) -> Result(UserInfo, AuthError) {
  let decoder = {
    use sub <- decode.field("sub", decode.string)
    use did_opt <- decode.field("did", decode.optional(decode.string))

    // Missing DID is represented as an empty string, not an error.
    let did = case did_opt {
      Some(d) -> d
      None -> ""
    }

    decode.success(UserInfo(sub: sub, did: did))
  }

  body
  |> json.parse(decoder)
  |> result.replace_error(ParseError)
}

/// Get ATProto session from authorization server
///
/// Makes a request to the `/api/atprotocol/session` endpoint to retrieve
/// the user's PDS endpoint, access token, and DPoP JWK.
pub fn get_atproto_session(
  token: String,
  auth_base_url: String,
) -> Result(AtprotoSession, AuthError) {
  let url = auth_base_url <> "/api/atprotocol/session"

  case request.to(url) {
    // Malformed URL — surfaced as a network-level failure.
    Error(_) -> Error(NetworkError)
    Ok(req) -> {
      let req =
        req
        |> request.set_header("authorization", "Bearer " <> token)

      case httpc.send(req) {
        Error(_) -> Error(NetworkError)
        Ok(resp) -> {
          case resp.status {
            200 -> parse_session(resp.body)
            // Any non-200 status is treated as an auth failure.
            _ -> Error(UnauthorizedToken)
          }
        }
      }
    }
  }
}

/// Parse ATProto session response JSON
fn parse_session(body: String) -> Result(AtprotoSession, AuthError) {
  // Extract dpop_jwk field from the JSON response
  // It should be a JSON object, so we need to extract it and re-stringify it
  // If extraction fails, dpop_jwk silently falls back to the empty object
  // "{}" rather than failing the whole parse.
  let dpop_jwk_json = case extract_dpop_jwk(body) {
    Ok(jwk) -> jwk
    Error(_) -> "{}"
  }

  let decoder = {
    use pds_endpoint <- decode.field("pds_endpoint", decode.string)
    use access_token <- decode.field("access_token", decode.string)

    decode.success(AtprotoSession(
      pds_endpoint: pds_endpoint,
      access_token: access_token,
      dpop_jwk: dpop_jwk_json,
    ))
  }

  body
  |> json.parse(decoder)
  |> result.replace_error(ParseError)
}

/// Extract the dpop_jwk field from the session response and convert it to a JSON string
///
/// NOTE(review): this is a raw string scan, not a JSON parse. It errors if
/// the literal `"dpop_jwk":` key appears more than once in the body (the
/// split then yields more than two parts), and the brace matching below does
/// not account for braces inside quoted string values — confirm the AIP
/// response shape cannot trigger either case.
fn extract_dpop_jwk(body: String) -> Result(String, Nil) {
  // Find the dpop_jwk field in the JSON
  case string.split(body, "\"dpop_jwk\":") {
    [_, rest] -> {
      // Find the matching closing brace for the dpop_jwk object
      case find_json_object(rest) {
        Ok(jwk_json) -> Ok(jwk_json)
        Error(_) -> Error(Nil)
      }
    }
    // Zero or multiple occurrences of the key — give up.
    _ -> Error(Nil)
  }
}

/// Extract a JSON object from a string starting after the opening brace
fn find_json_object(str: String) -> Result(String, Nil) {
  // Skip whitespace and find the opening brace
  let trimmed = string.trim_start(str)
  case string.starts_with(trimmed, "{") {
    False -> Error(Nil)
    True -> {
      // Count braces to find the matching closing brace
      let chars = string.to_graphemes(trimmed)
      case find_matching_brace(chars, 0, 0, "") {
        Ok(json) -> Ok(json)
        Error(_) -> Error(Nil)
      }
    }
  }
}

/// Find the matching closing brace and extract the JSON object
///
/// Walks the grapheme list keeping a running brace depth and accumulating
/// every character seen; returns the accumulated text once depth returns
/// to zero. Runs out of input (`[]`) -> Error.
fn find_matching_brace(
  chars: List(String),
  depth: Int,
  pos: Int,
  acc: String,
) -> Result(String, Nil) {
  case chars {
    [] -> Error(Nil)
    [char, ..rest] -> {
      let new_acc = acc <> char
      let new_depth = case char {
        "{" -> depth + 1
        "}" -> depth - 1
        _ -> depth
      }

      case new_depth {
        // `pos > 0` guards the very first character: depth is 0 before the
        // opening "{" is consumed, and we must not terminate there.
        0 if pos > 0 -> Ok(new_acc)
        _ -> find_matching_brace(rest, new_depth, pos + 1, new_acc)
      }
    }
  }
}
+692
server/src/backfill.gleam
import database
import gleam/dynamic.{type Dynamic}
import gleam/dynamic/decode
import gleam/erlang/process.{type Subject}
import gleam/http/request
import gleam/httpc
import gleam/io
import gleam/json
import gleam/list
import gleam/option.{type Option, None, Some}
import gleam/result
import gleam/string
import gleam/time/duration
import gleam/time/timestamp
import sqlight

/// Convert a Dynamic value (Erlang term) to a JSON string.
fn dynamic_to_json(value: Dynamic) -> String {
  // Erlang's json:encode returns an iolist, we need to convert it to a string
  let iolist = do_json_encode(value)
  iolist_to_string(iolist)
}

/// Encode a dynamic value to JSON (returns iolist)
@external(erlang, "json", "encode")
fn do_json_encode(value: Dynamic) -> Dynamic

/// Convert an iolist to a string
@external(erlang, "erlang", "iolist_to_binary")
fn iolist_to_binary(iolist: Dynamic) -> Dynamic

/// Wrapper to convert iolist to string
fn iolist_to_string(iolist: Dynamic) -> String {
  let binary = iolist_to_binary(iolist)
  // The binary is already a string in Gleam's representation
  case decode.run(binary, decode.string) {
    Ok(str) -> str
    Error(_) -> {
      // Fallback: inspect the raw term. NOTE(review): this yields Erlang
      // term syntax rather than JSON — acceptable only as a last resort
      // so indexing can continue.
      io.println_error("⚠️ Failed to convert iolist to string")
      string.inspect(iolist)
    }
  }
}

/// ATP data resolved from DID
pub type AtprotoData {
  AtprotoData(did: String, handle: String, pds: String)
}

/// Configuration for backfill operations
pub type BackfillConfig {
  BackfillConfig(
    plc_directory_url: String,
    index_actors: Bool,
    max_workers: Int,
  )
}

/// Creates a default backfill configuration
pub fn default_config() -> BackfillConfig {
  BackfillConfig(
    plc_directory_url: "https://plc.directory",
    index_actors: True,
    max_workers: 10,
  )
}

/// Resolve a DID to get ATP data (PDS endpoint and handle) by querying
/// the PLC directory at `plc_url <> "/" <> did`.
pub fn resolve_did(did: String, plc_url: String) -> Result(AtprotoData, String) {
  let url = plc_url <> "/" <> did

  case request.to(url) {
    Error(_) -> Error("Failed to create request for DID: " <> did)
    Ok(req) -> {
      case httpc.send(req) {
        Error(_) -> Error("Failed to fetch DID data for: " <> did)
        Ok(resp) -> {
          case resp.status {
            200 -> parse_atproto_data(resp.body, did)
            _ ->
              Error(
                "Failed to resolve DID "
                <> did
                <> " (status: "
                <> string.inspect(resp.status)
                <> ")",
              )
          }
        }
      }
    }
  }
}

/// Parse ATP data from a PLC directory response body.
///
/// Falls back to "https://bsky.social" when no AtprotoPersonalDataServer
/// service entry is present, and to the DID itself when no `at://` handle
/// is listed under `alsoKnownAs`.
fn parse_atproto_data(body: String, did: String) -> Result(AtprotoData, String) {
  // Simple decoder that extracts service and alsoKnownAs arrays
  let decoder = {
    use service_list <- decode.field(
      "service",
      decode.optional(decode.list(decode.dynamic)),
    )
    use handle_list <- decode.field(
      "alsoKnownAs",
      decode.optional(decode.list(decode.string)),
    )
    decode.success(#(service_list, handle_list))
  }

  case json.parse(body, decoder) {
    Error(_) -> Error("Failed to parse ATP data for DID: " <> did)
    Ok(#(service_list_opt, handle_list_opt)) -> {
      // Extract PDS endpoint from service list
      let pds = case service_list_opt {
        Some(service_list) ->
          service_list
          |> list.find_map(fn(service_dyn) {
            // Try to extract the service endpoint
            let service_decoder = {
              use service_type <- decode.field("type", decode.string)
              use endpoint <- decode.field("serviceEndpoint", decode.string)
              decode.success(#(service_type, endpoint))
            }

            case decode.run(service_dyn, service_decoder) {
              Ok(#("AtprotoPersonalDataServer", endpoint)) -> Ok(endpoint)
              _ -> Error(Nil)
            }
          })
          |> result.unwrap("https://bsky.social")
        None -> "https://bsky.social"
      }

      // Extract handle from alsoKnownAs (first "at://" entry, prefix stripped)
      let handle = case handle_list_opt {
        Some(handle_list) ->
          handle_list
          |> list.find(fn(h) { string.starts_with(h, "at://") })
          |> result.map(fn(h) { string.replace(h, "at://", "") })
          |> result.unwrap(did)
        None -> did
      }

      Ok(AtprotoData(did: did, handle: handle, pds: pds))
    }
  }
}

/// Worker function that resolves a DID and sends the result back to the
/// coordinating process via `reply_to`.
fn resolve_did_worker(
  did: String,
  plc_url: String,
  reply_to: Subject(Result(AtprotoData, Nil)),
) -> Nil {
  let result = case resolve_did(did, plc_url) {
    Ok(atp_data) -> Ok(atp_data)
    Error(err) -> {
      io.println_error("Error resolving DID " <> did <> ": " <> err)
      Error(Nil)
    }
  }
  process.send(reply_to, result)
}

/// Clamp the configured worker count to at least 1 so `list.sized_chunk`
/// always gets a valid size.
fn batch_size(config: BackfillConfig) -> Int {
  case config.max_workers {
    n if n < 1 -> 1
    n -> n
  }
}

/// Get ATP data for a list of repos (DIDs) - concurrent version.
///
/// Repos are processed in batches of `config.max_workers`; one worker is
/// spawned per repo in a batch, so at most `max_workers` DID resolutions
/// are in flight at any time. (Previously the list was split into
/// `max_workers` *batches*, making the in-flight count grow with the
/// input size instead of being capped.)
pub fn get_atp_data_for_repos(
  repos: List(String),
  config: BackfillConfig,
) -> List(AtprotoData) {
  repos
  |> list.sized_chunk(batch_size(config))
  |> list.flat_map(fn(chunk) {
    // Process each batch concurrently; a fresh subject per batch ensures
    // late replies from a timed-out worker cannot leak into a later batch.
    let subject = process.new_subject()

    // Spawn one worker per repo in this batch
    let _workers =
      chunk
      |> list.map(fn(repo) {
        process.spawn_unlinked(fn() {
          resolve_did_worker(repo, config.plc_directory_url, subject)
        })
      })

    // Collect exactly one reply per worker; a timeout drops that repo.
    list.range(1, list.length(chunk))
    |> list.filter_map(fn(_) {
      case process.receive(subject, 30_000) {
        Ok(result) -> result
        Error(_) -> Error(Nil)
      }
    })
  })
}

/// Fetch all records for a single repo and collection, following
/// pagination cursors until exhausted.
pub fn fetch_records_for_repo_collection(
  repo: String,
  collection: String,
  pds_url: String,
) -> List(database.Record) {
  fetch_records_paginated(repo, collection, pds_url, None, [])
}

/// Helper function for paginated record fetching.
///
/// On any request/parse failure the records accumulated so far are
/// returned (best-effort: a partial page is better than none).
fn fetch_records_paginated(
  repo: String,
  collection: String,
  pds_url: String,
  cursor: Option(String),
  acc: List(database.Record),
) -> List(database.Record) {
  // Build URL with query parameters.
  // NOTE(review): repo/collection/cursor are not URL-encoded; DIDs and
  // NSIDs are URL-safe, assumed the PDS cursors are too — confirm.
  let base_url =
    pds_url
    <> "/xrpc/com.atproto.repo.listRecords?repo="
    <> repo
    <> "&collection="
    <> collection
    <> "&limit=100"

  let url = case cursor {
    Some(c) -> base_url <> "&cursor=" <> c
    None -> base_url
  }

  case request.to(url) {
    Error(_) -> {
      io.println_error("Failed to create request for: " <> url)
      acc
    }
    Ok(req) -> {
      case httpc.send(req) {
        Error(_) -> {
          io.println_error(
            "Failed to fetch records for " <> repo <> "/" <> collection,
          )
          acc
        }
        Ok(resp) -> {
          case resp.status {
            200 -> {
              case parse_list_records_response(resp.body, repo, collection) {
                Ok(#(records, next_cursor)) -> {
                  let new_acc = list.append(acc, records)
                  case next_cursor {
                    Some(c) ->
                      fetch_records_paginated(
                        repo,
                        collection,
                        pds_url,
                        Some(c),
                        new_acc,
                      )
                    None -> new_acc
                  }
                }
                Error(err) -> {
                  io.println_error(
                    "Failed to parse records for "
                    <> repo
                    <> "/"
                    <> collection
                    <> ": "
                    <> err,
                  )
                  acc
                }
              }
            }
            _ -> {
              io.println_error(
                "Failed to fetch records for "
                <> repo
                <> "/"
                <> collection
                <> " (status: "
                <> string.inspect(resp.status)
                <> ")",
              )
              acc
            }
          }
        }
      }
    }
  }
}

/// Parse the response from com.atproto.repo.listRecords into database
/// records plus an optional continuation cursor.
fn parse_list_records_response(
  body: String,
  repo: String,
  collection: String,
) -> Result(#(List(database.Record), Option(String)), String) {
  let decoder = {
    use records <- decode.field(
      "records",
      decode.list({
        use uri <- decode.field("uri", decode.string)
        use cid <- decode.field("cid", decode.string)
        use value <- decode.field("value", decode.dynamic)
        decode.success(#(uri, cid, value))
      }),
    )

    decode.success(records)
  }

  // Parse the records first
  case json.parse(body, decoder) {
    Error(err) -> {
      io.println_error("Failed to parse records: " <> string.inspect(err))
      io.println_error("Response body snippet: " <> string.slice(body, 0, 200))
      Error("Failed to parse listRecords response")
    }
    Ok(record_tuples) -> {
      // Try to extract cursor separately (it might not exist in the JSON)
      let cursor_decoder = {
        use cursor <- decode.field("cursor", decode.optional(decode.string))
        decode.success(cursor)
      }

      let cursor = case json.parse(body, cursor_decoder) {
        Ok(c) -> c
        Error(_) -> None
      }

      // One shared indexed_at timestamp (UTC, RFC3339) for the whole page
      let now =
        timestamp.system_time()
        |> timestamp.to_rfc3339(duration.seconds(0))
      let records =
        record_tuples
        |> list.map(fn(tuple) {
          let #(uri, cid, value) = tuple
          database.Record(
            uri: uri,
            cid: cid,
            did: repo,
            collection: collection,
            json: dynamic_to_json(value),
            indexed_at: now,
          )
        })

      Ok(#(records, cursor))
    }
  }
}

/// Worker function that fetches records for one repo/collection pair and
/// sends them back to the coordinating process.
fn fetch_records_worker(
  repo: String,
  collection: String,
  pds: String,
  reply_to: Subject(List(database.Record)),
) -> Nil {
  let records = fetch_records_for_repo_collection(repo, collection, pds)
  process.send(reply_to, records)
}

/// Get all records for multiple repos and collections - concurrent version.
///
/// Jobs (repo × collection pairs) are processed in batches of
/// `config.max_workers`, capping concurrent fetches at `max_workers`.
pub fn get_records_for_repos(
  repos: List(String),
  collections: List(String),
  atp_data: List(AtprotoData),
  config: BackfillConfig,
) -> List(database.Record) {
  // Create all repo/collection job pairs; repos without resolved ATP data
  // are skipped (we don't know their PDS).
  let jobs =
    repos
    |> list.flat_map(fn(repo) {
      case list.find(atp_data, fn(data) { data.did == repo }) {
        Error(_) -> {
          io.println_error("No ATP data found for repo: " <> repo)
          []
        }
        Ok(data) -> {
          collections
          |> list.map(fn(collection) { #(repo, collection, data.pds) })
        }
      }
    })

  jobs
  |> list.sized_chunk(batch_size(config))
  |> list.flat_map(fn(chunk) {
    // Process each batch concurrently with a batch-local subject
    let subject = process.new_subject()

    // Spawn one worker per job in this batch
    let _workers =
      chunk
      |> list.map(fn(job) {
        let #(repo, collection, pds) = job
        process.spawn_unlinked(fn() {
          fetch_records_worker(repo, collection, pds, subject)
        })
      })

    // Collect exactly one reply per worker; a timeout contributes nothing.
    list.range(1, list.length(chunk))
    |> list.flat_map(fn(_) {
      case process.receive(subject, 60_000) {
        Ok(records) -> records
        Error(_) -> []
      }
    })
  })
}

/// Index records into the database; individual insert failures are logged
/// and skipped so one bad record doesn't abort the batch.
pub fn index_records(
  records: List(database.Record),
  conn: sqlight.Connection,
) -> Nil {
  records
  |> list.each(fn(record) {
    case
      database.insert_record(
        conn,
        record.uri,
        record.cid,
        record.did,
        record.collection,
        record.json,
      )
    {
      Ok(_) -> Nil
      Error(err) -> {
        io.println_error(
          "Failed to insert record "
          <> record.uri
          <> ": "
          <> string.inspect(err),
        )
      }
    }
  })
}

/// Index actors into the database; individual upsert failures are logged
/// and skipped.
pub fn index_actors(
  atp_data: List(AtprotoData),
  conn: sqlight.Connection,
) -> Nil {
  atp_data
  |> list.each(fn(data) {
    case database.upsert_actor(conn, data.did, data.handle) {
      Ok(_) -> Nil
      Error(err) -> {
        io.println_error(
          "Failed to upsert actor " <> data.did <> ": " <> string.inspect(err),
        )
      }
    }
  })
}

/// Fetch repos that have records for a specific collection from the relay
/// with pagination.
fn fetch_repos_for_collection(
  collection: String,
) -> Result(List(String), String) {
  fetch_repos_paginated(collection, None, [])
}

/// Helper function for paginated repo fetching from the relay.
fn fetch_repos_paginated(
  collection: String,
  cursor: Option(String),
  acc: List(String),
) -> Result(List(String), String) {
  // Build URL with large limit and cursor
  let base_url =
    "https://relay1.us-west.bsky.network/xrpc/com.atproto.sync.listReposByCollection?collection="
    <> collection
    <> "&limit=1000"

  let url = case cursor {
    Some(c) -> base_url <> "&cursor=" <> c
    None -> base_url
  }

  case request.to(url) {
    Error(_) -> Error("Failed to create request for collection: " <> collection)
    Ok(req) -> {
      case httpc.send(req) {
        Error(_) ->
          Error("Failed to fetch repos for collection: " <> collection)
        Ok(resp) -> {
          case resp.status {
            200 -> {
              case parse_repos_response(resp.body) {
                Ok(#(repos, next_cursor)) -> {
                  let new_acc = list.append(acc, repos)
                  case next_cursor {
                    Some(c) ->
                      fetch_repos_paginated(collection, Some(c), new_acc)
                    None -> {
                      io.println(
                        "✓ Found "
                        <> string.inspect(list.length(new_acc))
                        <> " total repositories for collection \""
                        <> collection
                        <> "\"",
                      )
                      Ok(new_acc)
                    }
                  }
                }
                Error(err) -> Error(err)
              }
            }
            _ ->
              Error(
                "Failed to fetch repos for collection "
                <> collection
                <> " (status: "
                <> string.inspect(resp.status)
                <> ")",
              )
          }
        }
      }
    }
  }
}

/// Parse the response from com.atproto.sync.listReposByCollection into a
/// list of DIDs plus an optional continuation cursor.
fn parse_repos_response(
  body: String,
) -> Result(#(List(String), Option(String)), String) {
  let decoder = {
    use repos <- decode.field(
      "repos",
      decode.list({
        use did <- decode.field("did", decode.string)
        decode.success(did)
      }),
    )
    decode.success(repos)
  }

  // Parse repos first
  case json.parse(body, decoder) {
    Error(_) -> Error("Failed to parse repos response")
    Ok(repos) -> {
      // Try to extract cursor separately
      let cursor_decoder = {
        use cursor <- decode.field("cursor", decode.optional(decode.string))
        decode.success(cursor)
      }

      let cursor = case json.parse(body, cursor_decoder) {
        Ok(c) -> c
        Error(_) -> None
      }

      Ok(#(repos, cursor))
    }
  }
}

/// Main backfill function - backfill collections for specified repos.
///
/// When `repos` is empty, repositories are discovered per-collection via
/// the relay. ATP data (PDS + handle) is resolved for every repo, then
/// records for `collections` and `external_collections` are fetched and
/// indexed; actors are indexed when `config.index_actors` is set.
pub fn backfill_collections(
  repos: List(String),
  collections: List(String),
  external_collections: List(String),
  config: BackfillConfig,
  conn: sqlight.Connection,
) -> Nil {
  io.println("")
  io.println("🔄 Starting backfill operation")

  case collections {
    [] -> io.println("⚠️ No collections specified for backfill")
    _ ->
      io.println(
        "📚 Processing "
        <> string.inspect(list.length(collections))
        <> " collections: "
        <> string.join(collections, ", "),
      )
  }

  case external_collections {
    [] -> Nil
    _ ->
      io.println(
        "🌐 Including "
        <> string.inspect(list.length(external_collections))
        <> " external collections: "
        <> string.join(external_collections, ", "),
      )
  }

  // Determine which repos to use
  let all_repos = case repos {
    [] -> {
      // Fetch repos for all collections from the relay
      io.println("📊 Fetching repositories for collections...")
      let fetched_repos =
        collections
        |> list.filter_map(fn(collection) {
          case fetch_repos_for_collection(collection) {
            Ok(repos) -> Ok(repos)
            Error(err) -> {
              io.println_error(err)
              Error(Nil)
            }
          }
        })
        |> list.flatten
        |> list.unique

      io.println(
        "📋 Processing "
        <> string.inspect(list.length(fetched_repos))
        <> " unique repositories",
      )
      fetched_repos
    }
    provided_repos -> {
      io.println(
        "📋 Using "
        <> string.inspect(list.length(provided_repos))
        <> " provided repositories",
      )
      provided_repos
    }
  }

  // Get ATP data for all repos
  io.println("🔍 Resolving ATP data for repositories...")
  let atp_data = get_atp_data_for_repos(all_repos, config)
  io.println(
    "✓ Resolved ATP data for "
    <> string.inspect(list.length(atp_data))
    <> "/"
    <> string.inspect(list.length(all_repos))
    <> " repositories",
  )

  // Get all records for all repos and collections (main collections only)
  io.println("📥 Fetching records for repositories and collections...")
  let main_records =
    get_records_for_repos(all_repos, collections, atp_data, config)

  // Get external collections for the same repos
  let external_records = case external_collections {
    [] -> []
    _ ->
      get_records_for_repos(all_repos, external_collections, atp_data, config)
  }

  let all_records = list.append(main_records, external_records)
  io.println(
    "✓ Fetched " <> string.inspect(list.length(all_records)) <> " total records",
  )

  // Index actors (if enabled in config)
  case config.index_actors {
    True -> {
      io.println("📝 Indexing actors...")
      index_actors(atp_data, conn)
      io.println(
        "✓ Indexed " <> string.inspect(list.length(atp_data)) <> " actors",
      )
    }
    False -> io.println("⏭️ Skipping actor indexing (disabled in config)")
  }

  // Index records
  io.println(
    "📝 Indexing " <> string.inspect(list.length(all_records)) <> " records...",
  )
  index_records(all_records, conn)
  io.println("✅ Backfill complete!")
}
+571
server/src/database.gleam
import gleam/dynamic/decode
import gleam/io
import gleam/result
import sqlight

/// A single indexed AT Protocol record row, mirroring the `record` table.
pub type Record {
  Record(
    uri: String,
    cid: String,
    did: String,
    collection: String,
    json: String,
    indexed_at: String,
  )
}

/// A row of the `actor` table (DID → handle mapping).
pub type Actor {
  Actor(did: String, handle: String, indexed_at: String)
}

/// A row of the `lexicon` table (lexicon NSID → schema JSON).
pub type Lexicon {
  Lexicon(id: String, json: String, created_at: String)
}

/// Opens a connection to the SQLite database
pub fn connect(path: String) -> Result(sqlight.Connection, sqlight.Error) {
  sqlight.open(path)
}

/// Creates the record table if it doesn't exist, along with indexes on
/// did, collection, (did, collection), and indexed_at DESC.
/// The table must be created before its indexes, so statement order matters.
pub fn create_record_table(
  conn: sqlight.Connection,
) -> Result(Nil, sqlight.Error) {
  let create_table_sql =
    "
    CREATE TABLE IF NOT EXISTS record (
      uri TEXT PRIMARY KEY NOT NULL,
      cid TEXT NOT NULL,
      did TEXT NOT NULL,
      collection TEXT NOT NULL,
      json TEXT NOT NULL,
      indexed_at TEXT NOT NULL DEFAULT (datetime('now'))
    )
  "

  let create_did_index_sql =
    "
    CREATE INDEX IF NOT EXISTS idx_record_did
    ON record(did)
  "

  let create_collection_index_sql =
    "
    CREATE INDEX IF NOT EXISTS idx_record_collection
    ON record(collection)
  "

  let create_did_collection_index_sql =
    "
    CREATE INDEX IF NOT EXISTS idx_record_did_collection
    ON record(did, collection)
  "

  let create_indexed_at_index_sql =
    "
    CREATE INDEX IF NOT EXISTS idx_record_indexed_at
    ON record(indexed_at DESC)
  "

  use _ <- result.try(sqlight.exec(create_table_sql, conn))
  use _ <- result.try(sqlight.exec(create_did_index_sql, conn))
  use _ <- result.try(sqlight.exec(create_collection_index_sql, conn))
  use _ <- result.try(sqlight.exec(create_did_collection_index_sql, conn))
  use _ <- result.try(sqlight.exec(create_indexed_at_index_sql, conn))
  Ok(Nil)
}

/// Creates the actor table if it doesn't exist, plus indexes on handle
/// and indexed_at DESC.
/// NOTE(review): `handle` is nullable here, but the `Actor` decoders in
/// this module require a string — a row with a NULL handle would fail to
/// decode. `upsert_actor` always supplies a handle; confirm no other
/// writer inserts NULLs.
pub fn create_actor_table(
  conn: sqlight.Connection,
) -> Result(Nil, sqlight.Error) {
  let create_table_sql =
    "
    CREATE TABLE IF NOT EXISTS actor (
      did TEXT PRIMARY KEY NOT NULL,
      handle TEXT,
      indexed_at TEXT NOT NULL
    )
  "

  let create_handle_index_sql =
    "
    CREATE INDEX IF NOT EXISTS idx_actor_handle
    ON actor(handle)
  "

  let create_indexed_at_index_sql =
    "
    CREATE INDEX IF NOT EXISTS idx_actor_indexed_at
    ON actor(indexed_at DESC)
  "

  use _ <- result.try(sqlight.exec(create_table_sql, conn))
  use _ <- result.try(sqlight.exec(create_handle_index_sql, conn))
  use _ <- result.try(sqlight.exec(create_indexed_at_index_sql, conn))
  Ok(Nil)
}

/// Creates the lexicon table if it doesn't exist, plus an index on
/// created_at DESC.
pub fn create_lexicon_table(
  conn: sqlight.Connection,
) -> Result(Nil, sqlight.Error) {
  let create_table_sql =
    "
    CREATE TABLE IF NOT EXISTS lexicon (
      id TEXT PRIMARY KEY NOT NULL,
      json TEXT NOT NULL,
      created_at TEXT NOT NULL DEFAULT (datetime('now'))
    )
  "

  let create_created_at_index_sql =
    "
    CREATE INDEX IF NOT EXISTS idx_lexicon_created_at
    ON lexicon(created_at DESC)
  "

  use _ <- result.try(sqlight.exec(create_table_sql, conn))
  use _ <- result.try(sqlight.exec(create_created_at_index_sql, conn))
  Ok(Nil)
}

/// Initializes the database with all required tables and returns the
/// open connection.
pub fn initialize(path: String) -> Result(sqlight.Connection, sqlight.Error) {
  use conn <- result.try(connect(path))
  use _ <- result.try(create_record_table(conn))
  use _ <- result.try(create_actor_table(conn))
  use _ <- result.try(create_lexicon_table(conn))

  io.println("✅ Database initialized at: " <> path)
  Ok(conn)
}

/// Inserts or updates a record in the database (upsert keyed on uri;
/// indexed_at is refreshed on conflict).
pub fn insert_record(
  conn: sqlight.Connection,
  uri: String,
  cid: String,
  did: String,
  collection: String,
  json: String,
) -> Result(Nil, sqlight.Error) {
  let sql =
    "
    INSERT INTO record (uri, cid, did, collection, json)
    VALUES (?, ?, ?, ?, ?)
    ON CONFLICT(uri) DO UPDATE SET
      cid = excluded.cid,
      json = excluded.json,
      indexed_at = datetime('now')
  "

  // The statement returns no rows, so the string decoder is never applied;
  // it only satisfies sqlight.query's `expecting` parameter.
  use _ <- result.try(sqlight.query(
    sql,
    on: conn,
    with: [
      sqlight.text(uri),
      sqlight.text(cid),
      sqlight.text(did),
      sqlight.text(collection),
      sqlight.text(json),
    ],
    expecting: decode.string,
  ))
  Ok(Nil)
}

/// Gets a record by URI (at most one row, since uri is the primary key).
pub fn get_record(
  conn: sqlight.Connection,
  uri: String,
) -> Result(List(Record), sqlight.Error) {
  let sql =
    "
    SELECT uri, cid, did, collection, json, indexed_at
    FROM record
    WHERE uri = ?
  "

  // Columns are decoded positionally, matching the SELECT order.
  let decoder = {
    use uri <- decode.field(0, decode.string)
    use cid <- decode.field(1, decode.string)
    use did <- decode.field(2, decode.string)
    use collection <- decode.field(3, decode.string)
    use json <- decode.field(4, decode.string)
    use indexed_at <- decode.field(5, decode.string)
    decode.success(Record(uri:, cid:, did:, collection:, json:, indexed_at:))
  }

  sqlight.query(sql, on: conn, with: [sqlight.text(uri)], expecting: decoder)
}

/// Gets all records for a specific DID, newest first.
pub fn get_records_by_did(
  conn: sqlight.Connection,
  did: String,
) -> Result(List(Record), sqlight.Error) {
  let sql =
    "
    SELECT uri, cid, did, collection, json, indexed_at
    FROM record
    WHERE did = ?
    ORDER BY indexed_at DESC
  "

  let decoder = {
    use uri <- decode.field(0, decode.string)
    use cid <- decode.field(1, decode.string)
    use did <- decode.field(2, decode.string)
    use collection <- decode.field(3, decode.string)
    use json <- decode.field(4, decode.string)
    use indexed_at <- decode.field(5, decode.string)
    decode.success(Record(uri:, cid:, did:, collection:, json:, indexed_at:))
  }

  sqlight.query(sql, on: conn, with: [sqlight.text(did)], expecting: decoder)
}

/// Gets records for a specific collection, newest first, capped at 100 rows.
pub fn get_records_by_collection(
  conn: sqlight.Connection,
  collection: String,
) -> Result(List(Record), sqlight.Error) {
  let sql =
    "
    SELECT uri, cid, did, collection, json, indexed_at
    FROM record
    WHERE collection = ?
    ORDER BY indexed_at DESC
    LIMIT 100
  "

  let decoder = {
    use uri <- decode.field(0, decode.string)
    use cid <- decode.field(1, decode.string)
    use did <- decode.field(2, decode.string)
    use collection <- decode.field(3, decode.string)
    use json <- decode.field(4, decode.string)
    use indexed_at <- decode.field(5, decode.string)
    decode.success(Record(uri:, cid:, did:, collection:, json:, indexed_at:))
  }

  sqlight.query(
    sql,
    on: conn,
    with: [sqlight.text(collection)],
    expecting: decoder,
  )
}

/// Deletes a record by URI (hard delete)
pub fn delete_record(
  conn: sqlight.Connection,
  uri: String,
) -> Result(Nil, sqlight.Error) {
  let sql =
    "
    DELETE FROM record
    WHERE uri = ?
  "

  // DELETE returns no rows; the decoder is a placeholder (see insert_record).
  use _ <- result.try(sqlight.query(
    sql,
    on: conn,
    with: [sqlight.text(uri)],
    expecting: decode.string,
  ))
  Ok(Nil)
}

/// Inserts or updates an actor in the database (upsert keyed on did;
/// handle and indexed_at are refreshed on conflict).
pub fn upsert_actor(
  conn: sqlight.Connection,
  did: String,
  handle: String,
) -> Result(Nil, sqlight.Error) {
  let sql =
    "
    INSERT INTO actor (did, handle, indexed_at)
    VALUES (?, ?, datetime('now'))
    ON CONFLICT(did) DO UPDATE SET
      handle = excluded.handle,
      indexed_at = excluded.indexed_at
  "

  use _ <- result.try(sqlight.query(
    sql,
    on: conn,
    with: [sqlight.text(did), sqlight.text(handle)],
    expecting: decode.string,
  ))
  Ok(Nil)
}

/// Gets an actor by DID (at most one row; did is the primary key).
/// NOTE(review): decoding requires a non-NULL handle even though the
/// schema allows NULL — a NULL-handle row would make this return an error.
pub fn get_actor(
  conn: sqlight.Connection,
  did: String,
) -> Result(List(Actor), sqlight.Error) {
  let sql =
    "
    SELECT did, handle, indexed_at
    FROM actor
    WHERE did = ?
  "

  let decoder = {
    use did <- decode.field(0, decode.string)
    use handle <- decode.field(1, decode.string)
    use indexed_at <- decode.field(2, decode.string)
    decode.success(Actor(did:, handle:, indexed_at:))
  }

  sqlight.query(sql, on: conn, with: [sqlight.text(did)], expecting: decoder)
}

/// Gets an actor by handle (handle is not unique in the schema, so this
/// can return multiple rows).
pub fn get_actor_by_handle(
  conn: sqlight.Connection,
  handle: String,
) -> Result(List(Actor), sqlight.Error) {
  let sql =
    "
    SELECT did, handle, indexed_at
    FROM actor
    WHERE handle = ?
  "

  let decoder = {
    use did <- decode.field(0, decode.string)
    use handle <- decode.field(1, decode.string)
    use indexed_at <- decode.field(2, decode.string)
    decode.success(Actor(did:, handle:, indexed_at:))
  }

  sqlight.query(sql, on: conn, with: [sqlight.text(handle)], expecting: decoder)
}

/// Aggregate row for per-collection record counts.
pub type CollectionStat {
  CollectionStat(collection: String, count: Int)
}

/// Gets statistics for all collections (collection name and record count)
pub fn get_collection_stats(
  conn: sqlight.Connection,
) -> Result(List(CollectionStat), sqlight.Error) {
  let sql =
    "
    SELECT collection, COUNT(*) as count
    FROM record
    GROUP BY collection
    ORDER BY count DESC
  "

  let decoder = {
    use collection <- decode.field(0, decode.string)
    use count <- decode.field(1, decode.int)
    decode.success(CollectionStat(collection:, count:))
  }

  sqlight.query(sql, on: conn, with: [], expecting: decoder)
}

/// Gets the total number of actors in the database
pub fn get_actor_count(conn: sqlight.Connection) -> Result(Int, sqlight.Error) {
  let sql =
    "
    SELECT COUNT(*) as count
    FROM actor
  "

  let decoder = {
    use count <- decode.field(0, decode.int)
    decode.success(count)
  }

  // COUNT(*) always yields exactly one row; the Ok(_) arm is defensive.
  case sqlight.query(sql, on: conn, with: [], expecting: decoder) {
    Ok([count]) -> Ok(count)
    Ok(_) -> Ok(0)
    Error(err) -> Error(err)
  }
}

/// Checks if a lexicon exists for a given collection NSID
/// First checks the dedicated lexicon table, then falls back to record table
/// NOTE(review): the fallback is a substring LIKE over schema-record JSON,
/// so a collection NSID that is a prefix of another (e.g. "a.b.c" vs
/// "a.b.c.d") can produce false positives — intentional as a fuzzy
/// backward-compatibility path.
pub fn has_lexicon_for_collection(
  conn: sqlight.Connection,
  collection: String,
) -> Result(Bool, sqlight.Error) {
  // First check lexicon table (direct lookup is faster)
  case has_lexicon(conn, collection) {
    Ok(True) -> Ok(True)
    Ok(False) -> {
      // Fall back to searching record table for backward compatibility
      let sql =
        "
        SELECT COUNT(*) as count
        FROM record
        WHERE collection = 'com.atproto.lexicon.schema'
        AND json LIKE ?
      "

      let decoder = {
        use count <- decode.field(0, decode.int)
        decode.success(count)
      }

      let pattern = "%" <> collection <> "%"

      case
        sqlight.query(
          sql,
          on: conn,
          with: [sqlight.text(pattern)],
          expecting: decoder,
        )
      {
        Ok([count]) -> Ok(count > 0)
        Ok(_) -> Ok(False)
        Error(err) -> Error(err)
      }
    }
    Error(err) -> Error(err)
  }
}

/// Inserts or updates a lexicon in the database (upsert keyed on id;
/// created_at is refreshed on conflict).
pub fn insert_lexicon(
  conn: sqlight.Connection,
  id: String,
  json: String,
) -> Result(Nil, sqlight.Error) {
  let sql =
    "
    INSERT INTO lexicon (id, json, created_at)
    VALUES (?, ?, datetime('now'))
    ON CONFLICT(id) DO UPDATE SET
      json = excluded.json,
      created_at = datetime('now')
  "

  use _ <- result.try(sqlight.query(
    sql,
    on: conn,
    with: [sqlight.text(id), sqlight.text(json)],
    expecting: decode.string,
  ))
  Ok(Nil)
}

/// Gets a lexicon by ID (at most one row; id is the primary key).
pub fn get_lexicon(
  conn: sqlight.Connection,
  id: String,
) -> Result(List(Lexicon), sqlight.Error) {
  let sql =
    "
    SELECT id, json, created_at
    FROM lexicon
    WHERE id = ?
  "

  let decoder = {
    use id <- decode.field(0, decode.string)
    use json <- decode.field(1, decode.string)
    use created_at <- decode.field(2, decode.string)
    decode.success(Lexicon(id:, json:, created_at:))
  }

  sqlight.query(sql, on: conn, with: [sqlight.text(id)], expecting: decoder)
}

/// Gets all lexicons from the database, newest first.
pub fn get_all_lexicons(
  conn: sqlight.Connection,
) -> Result(List(Lexicon), sqlight.Error) {
  let sql =
    "
    SELECT id, json, created_at
    FROM lexicon
    ORDER BY created_at DESC
  "

  let decoder = {
    use id <- decode.field(0, decode.string)
    use json <- decode.field(1, decode.string)
    use created_at <- decode.field(2, decode.string)
    decode.success(Lexicon(id:, json:, created_at:))
  }

  sqlight.query(sql, on: conn, with: [], expecting: decoder)
}

/// Checks if a lexicon exists by ID
pub fn has_lexicon(
  conn: sqlight.Connection,
  id: String,
) -> Result(Bool, sqlight.Error) {
  let sql =
    "
    SELECT COUNT(*) as count
    FROM lexicon
    WHERE id = ?
  "

  let decoder = {
    use count <- decode.field(0, decode.int)
    decode.success(count)
  }

  case
    sqlight.query(sql, on: conn, with: [sqlight.text(id)], expecting: decoder)
  {
    Ok([count]) -> Ok(count > 0)
    Ok(_) -> Ok(False)
    Error(err) -> Error(err)
  }
}

/// Gets the total number of lexicons in the database
pub fn get_lexicon_count(conn: sqlight.Connection) -> Result(Int, sqlight.Error) {
  let sql =
    "
    SELECT COUNT(*) as count
    FROM lexicon
  "

  let decoder = {
    use count <- decode.field(0, decode.int)
    decode.success(count)
  }

  case sqlight.query(sql, on: conn, with: [], expecting: decoder) {
    Ok([count]) -> Ok(count)
    Ok(_) -> Ok(0)
    Error(err) -> Error(err)
  }
}

/// Gets all lexicons that are of type "record" (collections).
/// The two LIKE patterns cover both compact and pretty-printed JSON
/// ("type":"record" with and without a space) — a textual heuristic, not
/// a full JSON query.
pub fn get_record_type_lexicons(
  conn: sqlight.Connection,
) -> Result(List(Lexicon), sqlight.Error) {
  let sql =
    "
    SELECT id, json, created_at
    FROM lexicon
    WHERE json LIKE '%\"type\":\"record\"%'
    OR json LIKE '%\"type\": \"record\"%'
    ORDER BY id ASC
  "

  let decoder = {
    use id <- decode.field(0, decode.string)
    use json <- decode.field(1, decode.string)
    use created_at <- decode.field(2, decode.string)
    decode.success(Lexicon(id:, json:, created_at:))
  }

  sqlight.query(sql, on: conn, with: [], expecting: decoder)
}
+125
server/src/dpop.gleam
import atproto_auth.{type AtprotoSession}
import gleam/http.{type Method, Delete, Get, Head, Options, Patch, Post, Put}
import gleam/http/request
import gleam/http/response.{type Response}
import gleam/httpc
import gleam/list
import gleam/option.{None, Some}
import gleam/string
import jose_wrapper

/// Make an authenticated DPoP request to a PDS with nonce retry support.
///
/// The first attempt is sent without a nonce. If the server answers 401
/// with a `use_dpop_nonce` error body and supplies a `DPoP-Nonce` header,
/// the request is retried exactly once with that nonce. Any other response
/// (including a 401 without a usable nonce) is handed back to the caller.
pub fn make_dpop_request(
  method: String,
  url: String,
  session: AtprotoSession,
  body: String,
) -> Result(Response(String), String) {
  case make_dpop_request_with_nonce(method, url, session, body, None) {
    Error(err) -> Error(err)
    Ok(resp) ->
      case nonce_for_retry(resp) {
        Some(nonce) ->
          make_dpop_request_with_nonce(method, url, session, body, Some(nonce))
        None -> Ok(resp)
      }
  }
}

/// Return the server-supplied nonce when `resp` is a 401 `use_dpop_nonce`
/// challenge we can retry, or `None` when the response should be returned
/// to the caller unchanged.
fn nonce_for_retry(resp: Response(String)) -> option.Option(String) {
  case resp.status == 401 && string.contains(resp.body, "use_dpop_nonce") {
    True -> get_dpop_nonce_header(resp.headers)
    False -> None
  }
}

/// Build and send a single DPoP-authenticated request, embedding the given
/// nonce (if any) in the DPoP proof JWT.
fn make_dpop_request_with_nonce(
  method: String,
  url: String,
  session: AtprotoSession,
  body: String,
  nonce: option.Option(String),
) -> Result(Response(String), String) {
  let proof_result =
    jose_wrapper.generate_dpop_proof_with_nonce(
      method,
      url,
      session.access_token,
      session.dpop_jwk,
      nonce,
    )

  case proof_result {
    Error(err) ->
      Error("Failed to generate DPoP proof: " <> string.inspect(err))
    Ok(dpop_proof) ->
      case request.to(url) {
        Error(_) -> Error("Failed to create request")
        Ok(base_req) -> {
          // DPoP requires both the access token (authorization header with
          // the "DPoP" scheme) and the proof JWT (dpop header).
          let prepared =
            base_req
            |> request.set_method(parse_method(method))
            |> request.set_header(
              "authorization",
              "DPoP " <> session.access_token,
            )
            |> request.set_header("dpop", dpop_proof)
            |> request.set_header("content-type", "application/json")
            |> request.set_body(body)

          case httpc.send(prepared) {
            Ok(resp) -> Ok(resp)
            Error(_) -> Error("Request failed")
          }
        }
      }
  }
}

/// Extract the DPoP-Nonce header (case-insensitive name match) from a
/// response header list.
fn get_dpop_nonce_header(
  headers: List(#(String, String)),
) -> option.Option(String) {
  let found =
    list.find(headers, fn(header) {
      string.lowercase(header.0) == "dpop-nonce"
    })
  case found {
    Ok(#(_name, nonce)) -> Some(nonce)
    Error(_) -> None
  }
}

/// Map an HTTP method name (any case) to the typed Method; unknown names
/// fall back to Post, matching the original behavior.
fn parse_method(method: String) -> Method {
  case string.uppercase(method) {
    "DELETE" -> Delete
    "GET" -> Get
    "HEAD" -> Head
    "OPTIONS" -> Options
    "PATCH" -> Patch
    "PUT" -> Put
    "POST" -> Post
    _ -> Post
  }
}
+184
server/src/event_handler.gleam
import database
import gleam/dynamic.{type Dynamic}
import gleam/dynamic/decode
import gleam/io
import gleam/list
import gleam/option
import gleam/string
import jetstream
import lexicon
import sqlight

/// Convert a Dynamic value (Erlang term) to a JSON string.
///
/// Erlang's `json:encode` returns an iolist, which is flattened to a
/// binary/string before returning.
fn dynamic_to_json(value: Dynamic) -> String {
  let iolist = do_json_encode(value)
  iolist_to_string(iolist)
}

/// Encode a dynamic value to JSON (returns an iolist).
/// Uses the `json` module that is built into OTP 27+ (see jose_ffi.erl,
/// which relies on the same module).
@external(erlang, "json", "encode")
fn do_json_encode(value: Dynamic) -> Dynamic

/// Convert an iolist to a binary.
@external(erlang, "erlang", "iolist_to_binary")
fn iolist_to_binary(iolist: Dynamic) -> Dynamic

/// Flatten an iolist into a Gleam String.
///
/// On the (unexpected) failure to decode the resulting binary as a string,
/// logs a warning and falls back to `string.inspect` of the raw iolist so
/// the caller always receives *some* string.
fn iolist_to_string(iolist: Dynamic) -> String {
  let binary = iolist_to_binary(iolist)
  // The binary is already a string in Gleam's representation.
  case decode.run(binary, decode.string) {
    Ok(str) -> str
    Error(_) -> {
      io.println_error("⚠️ Failed to convert iolist to string")
      string.inspect(iolist)
    }
  }
}

/// Handle a commit event (create, update, or delete).
///
/// For create/update: the record payload is JSON-encoded, validated against
/// the lexicons stored in the database, and inserted on success. Validation
/// failures and events missing a record/cid are logged and dropped — the
/// record is NOT stored.
/// For delete: the record is removed by its at:// URI.
/// Unknown operations are logged and ignored. All outcomes are reported via
/// stdout/stderr; the function itself always returns Nil.
pub fn handle_commit_event(
  db: sqlight.Connection,
  did: String,
  commit: jetstream.CommitData,
) -> Nil {
  // Canonical AT Protocol record URI: at://<did>/<collection>/<rkey>
  let uri = "at://" <> did <> "/" <> commit.collection <> "/" <> commit.rkey

  case commit.operation {
    "create" | "update" -> {
      // Both record payload and cid must be present to index the event.
      case commit.record, commit.cid {
        option.Some(record_data), option.Some(cid_value) -> {
          // Convert the dynamic record to a JSON string using Erlang's
          // json:encode (via the FFI helpers above).
          let json_string = dynamic_to_json(record_data)

          // NOTE(review): all lexicons are re-fetched from the database on
          // every single commit event — likely a hot path; consider caching.
          case database.get_all_lexicons(db) {
            Ok(lexicons) -> {
              let lexicon_jsons = list.map(lexicons, fn(lex) { lex.json })

              // Validate the record against its collection's lexicon.
              case
                lexicon.validate_record(
                  lexicon_jsons,
                  commit.collection,
                  json_string,
                )
              {
                Ok(_) -> {
                  // Validation passed; upsert the record into the index.
                  case
                    database.insert_record(
                      db,
                      uri,
                      cid_value,
                      did,
                      commit.collection,
                      json_string,
                    )
                  {
                    Ok(_) -> {
                      io.println(
                        "✅ "
                        <> commit.operation
                        <> " "
                        <> commit.collection
                        <> " ("
                        <> commit.rkey
                        <> ")",
                      )
                    }
                    Error(err) -> {
                      io.println_error(
                        "❌ Failed to insert record "
                        <> uri
                        <> ": "
                        <> string.inspect(err),
                      )
                    }
                  }
                }
                Error(validation_error) -> {
                  // Invalid records are logged and silently dropped.
                  io.println_error(
                    "⚠️ Validation failed for "
                    <> uri
                    <> ": "
                    <> lexicon.describe_error(validation_error),
                  )
                }
              }
            }
            Error(db_err) -> {
              io.println_error(
                "❌ Failed to fetch lexicons for validation: "
                <> string.inspect(db_err),
              )
            }
          }
        }
        _, _ -> {
          io.println_error(
            "⚠️ "
            <> commit.operation
            <> " event missing record or cid for "
            <> uri,
          )
        }
      }
    }
    "delete" -> {
      io.println(
        "🗑️ delete " <> commit.collection <> " (" <> commit.rkey <> ")",
      )
      io.println(" URI: " <> uri)

      case database.delete_record(db, uri) {
        Ok(_) -> {
          io.println(" ✓ Deleted from database")
        }
        Error(err) -> {
          io.println_error(" ❌ Failed to delete: " <> string.inspect(err))
        }
      }
    }
    _ -> {
      io.println_error("⚠️ Unknown operation: " <> commit.operation)
    }
  }
}

/// Handle an identity event: upsert the actor's DID → handle mapping.
/// Failures are logged but not propagated.
pub fn handle_identity_event(
  db: sqlight.Connection,
  identity: jetstream.IdentityData,
) -> Nil {
  case database.upsert_actor(db, identity.did, identity.handle) {
    Ok(_) -> {
      io.println(
        "👤 identity update: " <> identity.handle <> " (" <> identity.did <> ")",
      )
    }
    Error(err) -> {
      io.println_error(
        "❌ Failed to upsert actor "
        <> identity.did
        <> ": "
        <> string.inspect(err),
      )
    }
  }
}

/// Handle an account event.
///
/// Currently log-only (active/inactive status); the database connection is
/// accepted but unused so the signature can grow real handling later.
pub fn handle_account_event(
  _db: sqlight.Connection,
  account: jetstream.AccountData,
) -> Nil {
  // For now, just log account events - we could extend this in the future
  let status = case account.active {
    True -> "active"
    False -> "inactive"
  }
  io.println("🔐 account " <> status <> ": " <> account.did)
}
+92
server/src/graphiql_handler.gleam
/// GraphiQL interface handler
///
/// Serves the GraphiQL interactive GraphQL IDE as a single self-contained
/// HTML page. All assets (React, GraphiQL, the explorer plugin) are loaded
/// from the esm.sh CDN via an import map, so the server ships no JS bundle.
import wisp

/// Respond to any request with the GraphiQL IDE page.
///
/// The embedded page points its fetcher at the relative `/graphql`
/// endpoint, so it queries whatever host served this page. The request
/// argument is ignored (same page for every method/path that routes here).
pub fn handle_graphiql_request(_req: wisp.Request) -> wisp.Response {
  // NOTE: this HTML is served verbatim; edits here change the IDE directly.
  let graphiql_html =
    "<!doctype html>
<html lang=\"en\">
<head>
<meta charset=\"UTF-8\" />
<meta name=\"viewport\" content=\"width=device-width, initial-scale=1.0\" />
<title>QuickSlice GraphiQL</title>
<style>
body {
margin: 0;
}

#graphiql {
height: 100dvh;
}

.loading {
height: 100%;
display: flex;
align-items: center;
justify-content: center;
font-size: 4rem;
}
</style>
<link rel=\"stylesheet\" href=\"https://esm.sh/graphiql/dist/style.css\" />
<link
rel=\"stylesheet\"
href=\"https://esm.sh/@graphiql/plugin-explorer/dist/style.css\"
/>
<script type=\"importmap\">
{
\"imports\": {
\"react\": \"https://esm.sh/react@19.1.0\",
\"react/\": \"https://esm.sh/react@19.1.0/\",

\"react-dom\": \"https://esm.sh/react-dom@19.1.0\",
\"react-dom/\": \"https://esm.sh/react-dom@19.1.0/\",

\"graphiql\": \"https://esm.sh/graphiql?standalone&external=react,react-dom,@graphiql/react,graphql\",
\"graphiql/\": \"https://esm.sh/graphiql/\",
\"@graphiql/plugin-explorer\": \"https://esm.sh/@graphiql/plugin-explorer?standalone&external=react,@graphiql/react,graphql\",
\"@graphiql/react\": \"https://esm.sh/@graphiql/react?standalone&external=react,react-dom,graphql,@graphiql/toolkit,@emotion/is-prop-valid\",

\"@graphiql/toolkit\": \"https://esm.sh/@graphiql/toolkit?standalone&external=graphql\",
\"graphql\": \"https://esm.sh/graphql@16.11.0\",
\"@emotion/is-prop-valid\": \"data:text/javascript,\"
}
}
</script>
<script type=\"module\">
import React from 'react';
import ReactDOM from 'react-dom/client';
import { GraphiQL, HISTORY_PLUGIN } from 'graphiql';
import { createGraphiQLFetcher } from '@graphiql/toolkit';
import { explorerPlugin } from '@graphiql/plugin-explorer';
import 'graphiql/setup-workers/esm.sh';

const fetcher = createGraphiQLFetcher({
url: '/graphql'
});
const plugins = [HISTORY_PLUGIN, explorerPlugin()];

function App() {
return React.createElement(GraphiQL, {
fetcher,
plugins,
defaultEditorToolsVisibility: true,
});
}

const container = document.getElementById('graphiql');
const root = ReactDOM.createRoot(container);
root.render(React.createElement(App));
</script>
</head>
<body>
<div id=\"graphiql\">
<div class=\"loading\">Loading…</div>
</div>
</body>
</html>"

  wisp.response(200)
  |> wisp.set_header("content-type", "text/html; charset=utf-8")
  |> wisp.set_body(wisp.Text(graphiql_html))
}
+208
server/src/graphql_gleam.gleam
/// Pure Gleam GraphQL Implementation
///
/// This module provides GraphQL schema building and query execution using
/// pure Gleam code, replacing the previous Elixir FFI implementation.
import database
import gleam/dict
import gleam/dynamic
import gleam/dynamic/decode
import gleam/json
import gleam/list
import gleam/option
import gleam/result
import gleam/string
import graphql/executor
import graphql/schema
import graphql/value
import lexicon_graphql/db_schema_builder
import lexicon_graphql/lexicon_parser
import sqlight

/// Execute a GraphQL query against lexicons in the database.
///
/// Pipeline: fetch lexicons → parse them → build a schema whose resolvers
/// read records from the database → execute the query → serialize the
/// response to a JSON string. Errors at any step are returned as a plain
/// String message. NOTE(review): lexicons are re-fetched and the schema
/// rebuilt on every call — per-request cost; consider caching.
pub fn execute_query_with_db(
  db: sqlight.Connection,
  query_string: String,
) -> Result(String, String) {
  // Step 1: Fetch lexicons from database
  use lexicon_records <- result.try(
    database.get_all_lexicons(db)
    |> result.map_error(fn(_) { "Failed to fetch lexicons from database" }),
  )

  // Step 2: Parse lexicon JSON into structured Lexicon types.
  // Unparseable lexicons are silently skipped (filter_map).
  let parsed_lexicons =
    lexicon_records
    |> list.filter_map(fn(lex) { lexicon_parser.parse_lexicon(lex.json) })

  // Check if we got any valid lexicons
  case parsed_lexicons {
    [] -> Error("No valid lexicons found in database")
    _ -> {
      // Step 3: Create a record fetcher function that queries the database
      let record_fetcher = fn(collection_nsid: String) -> Result(
        List(value.Value),
        String,
      ) {
        // Fetch records from database for this collection
        case database.get_records_by_collection(db, collection_nsid) {
          // Any database error is treated as an empty collection — DB
          // failures are swallowed here, not surfaced to the client.
          Error(_) -> Ok([])
          Ok(records) -> {
            // Convert database records to GraphQL values
            let graphql_records =
              list.map(records, fn(record) { record_to_graphql_value(record) })
            Ok(graphql_records)
          }
        }
      }

      // Step 4: Build schema with database-backed resolvers
      use graphql_schema <- result.try(
        db_schema_builder.build_schema_with_fetcher(
          parsed_lexicons,
          record_fetcher,
        ),
      )

      // Step 5: Execute the query
      let ctx = schema.Context(data: option.None)
      use response <- result.try(executor.execute(
        query_string,
        graphql_schema,
        ctx,
      ))

      // Step 6: Format the response as JSON
      Ok(format_response(response))
    }
  }
}

/// Convert a database Record to a GraphQL value.Value.
///
/// Creates an Object with the record metadata (uri, cid, did, collection,
/// indexedAt) plus the parsed record JSON under the "value" key.
fn record_to_graphql_value(record: database.Record) -> value.Value {
  // Parse the record JSON and convert to GraphQL value.
  // Unparseable JSON degrades to an empty object rather than failing.
  let value_object = case parse_json_to_value(record.json) {
    Ok(val) -> val
    Error(_) -> value.Object([])
  }

  // Create the full record object with metadata and value
  value.Object([
    #("uri", value.String(record.uri)),
    #("cid", value.String(record.cid)),
    #("did", value.String(record.did)),
    #("collection", value.String(record.collection)),
    #("indexedAt", value.String(record.indexed_at)),
    #("value", value_object),
  ])
}

/// Parse a JSON string and convert it to a GraphQL value.Value.
fn parse_json_to_value(json_str: String) -> Result(value.Value, String) {
  // Parse JSON string to dynamic value
  case json.parse(json_str, decode.dynamic) {
    Ok(dyn) -> Ok(dynamic_to_value(dyn))
    Error(_) -> Error("Failed to parse JSON")
  }
}

/// Convert a dynamic value to a GraphQL value.Value.
///
/// Tries decoders in a fixed order: string, int, float, bool, list, dict,
/// falling back to Null. The ordering is significant — int is attempted
/// before float, presumably so integral JSON numbers stay Int rather than
/// Float. Recurses into lists and object values.
fn dynamic_to_value(dyn: dynamic.Dynamic) -> value.Value {
  case decode.run(dyn, decode.string) {
    Ok(s) -> value.String(s)
    Error(_) ->
      case decode.run(dyn, decode.int) {
        Ok(i) -> value.Int(i)
        Error(_) ->
          case decode.run(dyn, decode.float) {
            Ok(f) -> value.Float(f)
            Error(_) ->
              case decode.run(dyn, decode.bool) {
                Ok(b) -> value.Boolean(b)
                Error(_) ->
                  case decode.run(dyn, decode.list(decode.dynamic)) {
                    Ok(items) -> {
                      let converted_items = list.map(items, dynamic_to_value)
                      value.List(converted_items)
                    }
                    Error(_) ->
                      case
                        decode.run(
                          dyn,
                          decode.dict(decode.string, decode.dynamic),
                        )
                      {
                        Ok(dict) -> {
                          let fields =
                            dict
                            |> dict.to_list
                            |> list.map(fn(entry) {
                              let #(key, val) = entry
                              #(key, dynamic_to_value(val))
                            })
                          value.Object(fields)
                        }
                        // Anything undecodable becomes GraphQL null.
                        Error(_) -> value.Null
                      }
                  }
              }
          }
      }
  }
}

/// Format an executor.Response as a JSON string of the standard GraphQL
/// shape: {"data": ..., "errors": [...]}. The errors array is always
/// present (empty when there were none).
fn format_response(response: executor.Response) -> String {
  let data_json = value_to_json(response.data)

  let errors_json = case response.errors {
    [] -> "[]"
    errors -> {
      let error_strings =
        list.map(errors, fn(err) {
          // message/path are escaped via json.string / json.array before
          // being spliced into the hand-built object.
          let message_json = json.string(err.message) |> json.to_string
          let path_json =
            json.array(err.path, of: json.string) |> json.to_string

          "{\"message\": " <> message_json <> ", \"path\": " <> path_json <> "}"
        })

      "[" <> string.join(error_strings, ",") <> "]"
    }
  }

  "{\"data\": " <> data_json <> ", \"errors\": " <> errors_json <> "}"
}

/// Convert a GraphQL value to a JSON string.
///
/// Scalars are serialized through gleam/json (so strings are properly
/// escaped); lists and objects are assembled by concatenation. Enum values
/// are emitted as JSON strings.
fn value_to_json(val: value.Value) -> String {
  case val {
    value.Null -> "null"
    value.Int(i) -> json.int(i) |> json.to_string
    value.Float(f) -> json.float(f) |> json.to_string
    value.String(s) -> json.string(s) |> json.to_string
    value.Boolean(b) -> json.bool(b) |> json.to_string
    value.Enum(e) -> json.string(e) |> json.to_string
    value.List(items) -> {
      let item_jsons = list.map(items, value_to_json)
      "[" <> string.join(item_jsons, ",") <> "]"
    }
    value.Object(fields) -> {
      let field_jsons =
        list.map(fields, fn(field) {
          let #(key, value) = field
          let key_json = json.string(key) |> json.to_string
          let value_json = value_to_json(value)
          key_json <> ": " <> value_json
        })
      "{" <> string.join(field_jsons, ",") <> "}"
    }
  }
}
+115
server/src/graphql_handler.gleam
/// GraphQL HTTP request handler
///
/// Handles POST requests to /graphql endpoint, builds schemas from lexicons,
/// and executes GraphQL queries.
import gleam/bit_array
import gleam/dynamic/decode
import gleam/http
import gleam/json
import gleam/list
import gleam/result
import graphql_gleam
import sqlight
import wisp

/// Handle GraphQL HTTP requests.
///
/// Expects POST requests with a JSON body containing:
/// - query: GraphQL query string
/// GET requests with a `?query=` parameter are also supported.
/// Any other method gets a 405 with an `allow` header.
///
/// Returns GraphQL query results as JSON.
pub fn handle_graphql_request(
  req: wisp.Request,
  db: sqlight.Connection,
) -> wisp.Response {
  case req.method {
    http.Post -> handle_graphql_post(req, db)
    http.Get -> handle_graphql_get(req, db)
    _ -> method_not_allowed_response()
  }
}

/// POST path: read the body, decode UTF-8, extract "query", execute.
fn handle_graphql_post(
  req: wisp.Request,
  db: sqlight.Connection,
) -> wisp.Response {
  case wisp.read_body_bits(req) {
    Ok(body) -> {
      case bit_array.to_string(body) {
        Ok(body_string) -> {
          // Parse JSON to extract the "query" field
          case extract_query_from_json(body_string) {
            Ok(query) -> execute_graphql_query(db, query)
            Error(err) -> bad_request_response("Invalid JSON: " <> err)
          }
        }
        Error(_) -> bad_request_response("Request body must be valid UTF-8")
      }
    }
    Error(_) -> bad_request_response("Failed to read request body")
  }
}

/// GET path: the query comes from the `query` URL parameter.
fn handle_graphql_get(
  req: wisp.Request,
  db: sqlight.Connection,
) -> wisp.Response {
  let query_params = wisp.get_query(req)
  case list.key_find(query_params, "query") {
    Ok(query) -> execute_graphql_query(db, query)
    Error(_) -> bad_request_response("Missing 'query' parameter")
  }
}

/// Run the query through the pure Gleam GraphQL implementation and map the
/// outcome to an HTTP response.
fn execute_graphql_query(db: sqlight.Connection, query: String) -> wisp.Response {
  case graphql_gleam.execute_query_with_db(db, query) {
    Ok(result_json) -> success_response(result_json)
    Error(err) -> internal_error_response(err)
  }
}

/// Extract the "query" string from a JSON request body.
fn extract_query_from_json(json_str: String) -> Result(String, String) {
  let decoder = {
    use query <- decode.field("query", decode.string)
    decode.success(query)
  }

  json.parse(json_str, decoder)
  |> result.map_error(fn(_) { "Invalid JSON or missing 'query' field" })
}

// Response helpers

/// 200 with a pre-serialized JSON payload.
fn success_response(data: String) -> wisp.Response {
  wisp.response(200)
  |> wisp.set_header("content-type", "application/json")
  |> wisp.set_body(wisp.Text(data))
}

/// Build an error response with a properly-encoded JSON body.
///
/// The message is serialized through json.string so quotes, backslashes and
/// control characters in error text (which often echoes user input, e.g.
/// parse errors) cannot break the JSON — the previous string-concatenation
/// approach produced invalid JSON for any message containing a double quote.
fn error_response(
  status: Int,
  error: String,
  message: String,
) -> wisp.Response {
  let body =
    json.object([
      #("error", json.string(error)),
      #("message", json.string(message)),
    ])
    |> json.to_string

  wisp.response(status)
  |> wisp.set_header("content-type", "application/json")
  |> wisp.set_body(wisp.Text(body))
}

fn bad_request_response(message: String) -> wisp.Response {
  error_response(400, "BadRequest", message)
}

fn internal_error_response(message: String) -> wisp.Response {
  error_response(500, "InternalError", message)
}

fn method_not_allowed_response() -> wisp.Response {
  // RFC 9110 requires an Allow header on 405 responses.
  error_response(405, "MethodNotAllowed", "Only POST and GET are allowed")
  |> wisp.set_header("allow", "GET, POST")
}
+190
server/src/importer.gleam
import database
import gleam/dynamic/decode
import gleam/io
import gleam/json
import gleam/list
import gleam/result
import gleam/string
import lexicon
import simplifile
import sqlight

/// Summary of a bulk lexicon import: files seen, files imported, files
/// failed, and the per-file failure messages.
pub type ImportStats {
  ImportStats(total: Int, imported: Int, failed: Int, errors: List(String))
}

/// Imports lexicons from a directory into the database.
///
/// Recursively scans `directory` for .json files, validates each one as a
/// lexicon schema, inserts the valid ones, and returns aggregate stats.
pub fn import_lexicons_from_directory(
  directory: String,
) -> Result(ImportStats, String) {
  use db <- result.try(
    database.initialize("atproto.db")
    |> result.map_error(fn(_) { "Failed to initialize database" }),
  )

  io.println("📁 Scanning directory recursively...")
  use file_paths <- result.try(scan_directory_recursive(directory))

  io.println(
    " ✓ Found " <> string.inspect(list.length(file_paths)) <> " .json files",
  )
  io.println("")
  io.println("📝 Validating and importing lexicons...")

  let results =
    list.map(file_paths, fn(file_path) { import_single_lexicon(db, file_path) })

  // Split outcomes once and derive every statistic from the split.
  let #(successes, failures) = list.partition(results, result.is_ok)
  let errors =
    list.filter_map(failures, fn(r) {
      case r {
        Error(err) -> Ok(err)
        Ok(_) -> Error(Nil)
      }
    })

  Ok(ImportStats(
    total: list.length(results),
    imported: list.length(successes),
    failed: list.length(failures),
    errors: errors,
  ))
}

/// Scans a directory recursively for JSON files.
///
/// Subdirectories that fail to scan contribute no paths (best-effort);
/// only the top-level path produces a hard error.
pub fn scan_directory_recursive(path: String) -> Result(List(String), String) {
  case simplifile.is_directory(path) {
    Ok(False) -> Error("Path is not a directory: " <> path)
    Error(_) -> Error("Failed to access directory: " <> path)
    Ok(True) ->
      case simplifile.read_directory(path) {
        Error(_) -> Error("Failed to read directory: " <> path)
        Ok(entries) ->
          Ok(
            list.flat_map(entries, fn(entry) {
              let entry_path = path <> "/" <> entry
              case simplifile.is_directory(entry_path) {
                Ok(True) ->
                  // Recurse; unreadable subdirectories yield nothing.
                  case scan_directory_recursive(entry_path) {
                    Ok(paths) -> paths
                    Error(_) -> []
                  }
                _ ->
                  // Plain entry: keep it only if it is a .json file.
                  case string.ends_with(entry, ".json") {
                    True -> [entry_path]
                    False -> []
                  }
              }
            }),
          )
      }
  }
}

/// Parses and validates a lexicon file.
///
/// Returns `#(lexicon_id, raw_json)` on success.
pub fn parse_and_validate_lexicon(
  file_path: String,
) -> Result(#(String, String), String) {
  use json_content <- result.try(
    simplifile.read(file_path)
    |> result.map_error(fn(_) { "Failed to read file" }),
  )

  use lexicon_id <- result.try(extract_lexicon_id(json_content))

  case lexicon.validate_schemas([json_content]) {
    Ok(_) -> Ok(#(lexicon_id, json_content))
    Error(err) -> Error("Validation failed: " <> format_validation_error(err))
  }
}

/// Extracts the lexicon ID from JSON content, accepting a top-level "id"
/// field with a "lexicon" field as fallback.
fn extract_lexicon_id(json_content: String) -> Result(String, String) {
  let id_decoder = {
    use id <- decode.field("id", decode.string)
    decode.success(id)
  }

  let lexicon_decoder = {
    use lex <- decode.field("lexicon", decode.string)
    decode.success(lex)
  }

  json.parse(json_content, id_decoder)
  |> result.lazy_or(fn() { json.parse(json_content, lexicon_decoder) })
  |> result.map_error(fn(_) {
    "Missing 'id' or 'lexicon' field - not a valid lexicon schema"
  })
}

/// Formats validation errors into readable strings.
/// Currently just string.inspect; the lexicon package supplies the detail.
fn format_validation_error(error: lexicon.ValidationError) -> String {
  string.inspect(error)
}

/// Imports a single lexicon file, logging the per-file outcome.
pub fn import_single_lexicon(
  conn: sqlight.Connection,
  file_path: String,
) -> Result(String, String) {
  // Use the last path segment as a short label in log/error messages.
  let file_name =
    string.split(file_path, "/")
    |> list.last
    |> result.unwrap(file_path)

  case parse_and_validate_lexicon(file_path) {
    Ok(#(lexicon_id, json_content)) ->
      case database.insert_lexicon(conn, lexicon_id, json_content) {
        Ok(_) -> {
          io.println(" ✓ " <> lexicon_id)
          Ok(lexicon_id)
        }
        Error(_) -> {
          let err_msg = file_name <> ": Database insertion failed"
          io.println(" ✗ " <> err_msg)
          Error(err_msg)
        }
      }
    Error(err) -> {
      let err_msg = file_name <> ": " <> err
      io.println(" ✗ " <> err_msg)
      Error(err_msg)
    }
  }
}
+92
server/src/jetstream_consumer.gleam
··· 1 + import database 2 + import event_handler 3 + import gleam/erlang/process 4 + import gleam/int 5 + import gleam/io 6 + import gleam/list 7 + import gleam/string 8 + import jetstream 9 + import sqlight 10 + 11 + /// Start the Jetstream consumer in a background process 12 + pub fn start(db: sqlight.Connection) -> Result(Nil, String) { 13 + io.println("") 14 + io.println("🚀 Starting Jetstream consumer...") 15 + 16 + // Get all record-type lexicons from the database 17 + case database.get_record_type_lexicons(db) { 18 + Ok(lexicons) -> { 19 + let collection_ids = list.map(lexicons, fn(lex) { lex.id }) 20 + 21 + case collection_ids { 22 + [] -> { 23 + io.println( 24 + "⚠️ No record-type lexicons found - skipping Jetstream consumer", 25 + ) 26 + io.println(" Import lexicons first to enable real-time indexing") 27 + io.println("") 28 + Ok(Nil) 29 + } 30 + _ -> { 31 + io.println( 32 + "📋 Listening to " 33 + <> int.to_string(list.length(collection_ids)) 34 + <> " collections:", 35 + ) 36 + list.each(collection_ids, fn(col) { io.println(" - " <> col) }) 37 + 38 + // Create Jetstream config 39 + let config = 40 + jetstream.JetstreamConfig( 41 + endpoint: "wss://jetstream2.us-east.bsky.network/subscribe", 42 + wanted_collections: collection_ids, 43 + wanted_dids: [], 44 + ) 45 + 46 + io.println("") 47 + io.println("🌐 Connecting to Jetstream...") 48 + io.println(" Endpoint: " <> config.endpoint) 49 + io.println(" DID filter: All DIDs (no filter)") 50 + io.println("") 51 + 52 + // Start the Jetstream consumer in a separate process 53 + // This will run independently and call our event handler callback 54 + process.spawn_unlinked(fn() { 55 + jetstream.start_consumer(config, fn(event_json) { 56 + handle_jetstream_event(db, event_json) 57 + }) 58 + }) 59 + 60 + io.println("✅ Jetstream consumer started") 61 + io.println("") 62 + 63 + Ok(Nil) 64 + } 65 + } 66 + } 67 + Error(err) -> { 68 + Error("Failed to fetch lexicons: " <> string.inspect(err)) 69 + } 70 + } 71 + } 72 + 73 + 
/// Dispatch a single raw Jetstream event JSON payload.
///
/// Only repo commit events are forwarded to the indexer; identity, account,
/// and unrecognised events carry nothing we index, so they are dropped
/// silently.
fn handle_jetstream_event(db: sqlight.Connection, event_json: String) -> Nil {
  case jetstream.parse_event(event_json) {
    jetstream.CommitEvent(did, _time_us, commit) ->
      event_handler.handle_commit_event(db, did, commit)
    jetstream.IdentityEvent(_, _, _) -> Nil
    jetstream.AccountEvent(_, _, _) -> Nil
    jetstream.UnknownEvent(_) -> Nil
  }
}
+101
server/src/jose_ffi.erl
-module(jose_ffi).
-export([generate_dpop_proof/5, sha256_hash/1]).

%% FFI companion for jose_wrapper.gleam.
%%
%% NOTE: the Gleam external declares these functions as returning
%% Result(String, String), so every {error, ...} tuple produced here MUST
%% carry a *binary* payload. The previous version returned
%% {error, {dpop_generation_failed, Reason}} — an ill-typed tuple that the
%% Gleam side cannot safely pattern-match as Error(String).
%%
%% Requires OTP 27+ (built-in `json` module); base64:encode/2 with options
%% is available since OTP 26.

%% Generate a DPoP proof JWT token (RFC 9449).
%% Args: Method, URL, AccessToken, JWKJson, ServerNonce — all binaries.
%% ServerNonce = <<>> means "no nonce claim".
%% Returns: {ok, CompactJWT} | {error, ReasonBinary}.
generate_dpop_proof(Method, URL, AccessToken, JWKJson, ServerNonce) ->
    try
        %% Decode the key material — OTP 27+ has the json module built in.
        JWKMap = json:decode(JWKJson),
        JWK = jose_jwk:from_map(JWKMap),

        %% Issued-at timestamp, seconds since epoch.
        Now = erlang:system_time(second),

        %% Unique per-proof identifier (independent of the server nonce).
        %% URL-safe, unpadded encoding keeps the claim JWT-friendly.
        Jti = base64:encode(crypto:strong_rand_bytes(16),
                            #{mode => urlsafe, padding => false}),

        %% "ath": base64url(SHA-256(access token)) per RFC 9449 §4.2.
        TokenHash = sha256_base64url(AccessToken),

        %% The public half of the JWK is embedded in the proof header.
        {_, PublicJWK} = jose_jwk:to_public_map(JWK),

        BasePayload = #{
            <<"jti">> => Jti,
            <<"htm">> => Method,
            <<"htu">> => URL,
            <<"iat">> => Now,
            <<"ath">> => TokenHash
        },

        %% Only attach "nonce" once the server has actually issued one.
        Payload = case ServerNonce of
            <<>> -> BasePayload;
            _ -> maps:put(<<"nonce">>, ServerNonce, BasePayload)
        end,

        Alg = detect_algorithm(JWK),

        %% DPoP proofs use typ "dpop+jwt" and carry the public key inline.
        JWSHeader = #{
            <<"alg">> => Alg,
            <<"typ">> => <<"dpop+jwt">>,
            <<"jwk">> => PublicJWK
        },

        JWT = jose_jwt:from_map(Payload),
        JWS = jose_jws:from_map(JWSHeader),
        Signed = jose_jwt:sign(JWK, JWS, JWT),
        {_JWS, CompactToken} = jose_jws:compact(Signed),

        {ok, CompactToken}
    catch
        Class:Reason ->
            %% Flatten any failure (error/throw/exit) into a binary so the
            %% Gleam side receives a well-typed Error(String).
            Msg = io_lib:format("dpop_generation_failed: ~p:~p",
                                [Class, Reason]),
            {error, iolist_to_binary(Msg)}
    end.

%% SHA-256 of Data, standard base64 (with padding). Accepts a binary or a
%% charlist; charlists are converted before hashing.
sha256_hash(Data) when is_binary(Data) ->
    base64:encode(crypto:hash(sha256, Data));
sha256_hash(Data) when is_list(Data) ->
    sha256_hash(list_to_binary(Data)).

%% Internal: SHA-256 then base64url without padding, as required for the
%% "ath" claim. Uses the OTP 26+ urlsafe encoder instead of hand-rolled
%% character replacement.
sha256_base64url(Data) ->
    base64:encode(crypto:hash(sha256, Data),
                  #{mode => urlsafe, padding => false}).

%% Internal: pick a JWS algorithm matching the key type; ES256 is the
%% conventional DPoP default and the fallback for unknown key kinds.
detect_algorithm(JWK) ->
    case jose_jwk:to_map(JWK) of
        {_Kind, #{<<"kty">> := <<"EC">>}} ->
            <<"ES256">>;
        {_Kind, #{<<"kty">> := <<"RSA">>}} ->
            <<"RS256">>;
        {_Kind, #{<<"kty">> := <<"OKP">>}} ->
            <<"EdDSA">>;
        _ ->
            <<"ES256">>
    end.
+58
server/src/jose_wrapper.gleam
/// Gleam wrapper for JOSE (JSON Object Signing and Encryption) library
/// Provides functions for DPoP proof generation using Erlang's jose library
import gleam/option.{type Option}

/// Generate a DPoP proof JWT token with optional nonce
///
/// Creates a signed JWT token for DPoP (Demonstrating Proof-of-Possession)
/// authentication. The token includes:
/// - jti: Unique nonce for this proof
/// - htm: HTTP method (GET, POST, etc.)
/// - htu: HTTP URI being accessed
/// - iat: Timestamp when proof was created
/// - ath: Base64url-encoded SHA-256 hash of the access token
/// - nonce: Optional DPoP nonce from server (if provided)
///
/// # Arguments
/// * `method` - HTTP method (e.g., "POST", "GET")
/// * `url` - Full URL being accessed
/// * `access_token` - OAuth access token
/// * `jwk_json` - JSON Web Key as a JSON string
/// * `nonce` - Optional nonce from server's DPoP-Nonce header
///
/// # Returns
/// * `Ok(String)` - The DPoP proof token (compact JWT format)
/// * `Error(String)` - Error message if generation fails
pub fn generate_dpop_proof_with_nonce(
  method: String,
  url: String,
  access_token: String,
  jwk_json: String,
  nonce: Option(String),
) -> Result(String, String) {
  // The FFI layer treats an empty string as "no nonce claim".
  let server_nonce = option.unwrap(nonce, "")
  generate_dpop_proof_internal(
    method,
    url,
    access_token,
    jwk_json,
    server_nonce,
  )
}

@external(erlang, "jose_ffi", "generate_dpop_proof")
fn generate_dpop_proof_internal(
  method: String,
  url: String,
  access_token: String,
  jwk_json: String,
  nonce: String,
) -> Result(String, String)

/// Hash a string using SHA-256
///
/// # Arguments
/// * `data` - The string to hash
///
/// # Returns
/// Base64-encoded SHA-256 hash
@external(erlang, "jose_ffi", "sha256_hash")
pub fn sha256_hash(data: String) -> String
+598
server/src/server.gleam
//// HTTP entry point for the QuickSlice server: CLI dispatch (import /
//// backfill subcommands), startup (lexicon auto-import + Jetstream
//// consumer), request routing, and the HTML stats page.

import argv
import backfill
import database
import dotenv_gleam
import envoy
import gleam/erlang/process.{type Subject}
import gleam/int
import gleam/io
import gleam/list
import gleam/option
import gleam/otp/actor
import graphiql_handler
import graphql_handler
import importer
import jetstream
import jetstream_consumer
import lustre/attribute
import lustre/element
import lustre/element/html
import mist
import sqlight
import wisp
import wisp/wisp_mist
import xrpc_handlers
import xrpc_router

/// Per-request context: the SQLite connection and the AIP auth server URL.
pub type Context {
  Context(db: sqlight.Connection, auth_base_url: String)
}

/// Messages accepted by the backfill actor; each carries a reply Subject
/// that is pinged with Nil when the backfill finishes.
pub type BackfillMessage {
  StartLexiconBackfill(reply_to: Subject(Nil))
  StartCustomBackfill(collections: List(String), reply_to: Subject(Nil))
}

/// Actor message handler for backfill work.
///
/// StartLexiconBackfill: backfills lexicon schemas, then checks a
/// hard-coded list of candidate collections for available lexicons and
/// custom-backfills only those found.
/// StartCustomBackfill: backfills exactly the collections given.
/// Both reply Nil to the caller when done and continue with `db` as state.
fn handle_backfill(db: sqlight.Connection, message: BackfillMessage) {
  case message {
    StartLexiconBackfill(client) -> {
      io.println("🔄 Starting lexicon schema backfill...")
      backfill_lexicon_schemas(db)

      // After lexicon backfill, check which collections have lexicons
      io.println("")
      io.println("🔍 Checking collections for lexicons...")

      // NOTE(review): candidate collections are hard-coded — presumably a
      // placeholder until collections are discovered dynamically; confirm.
      let collections_to_check = ["xyz.statusphere.status"]

      let collections_with_lexicons =
        collections_to_check
        |> list.filter(fn(collection) {
          case database.has_lexicon_for_collection(db, collection) {
            Ok(True) -> {
              io.println(" ✓ Found lexicon for: " <> collection)
              True
            }
            Ok(False) -> {
              io.println(" ✗ No lexicon for: " <> collection)
              False
            }
            Error(_) -> {
              // Lookup errors are treated the same as "no lexicon".
              io.println(" ⚠️ Error checking lexicon for: " <> collection)
              False
            }
          }
        })

      case collections_with_lexicons {
        [] -> {
          io.println("")
          io.println(
            "⚠️ No collections with lexicons found - skipping custom backfill",
          )
        }
        _ -> {
          io.println("")
          io.println(
            "📋 Starting custom backfill for "
            <> int.to_string(list.length(collections_with_lexicons))
            <> " collections with lexicons...",
          )
          run_custom_backfill_for_collections(db, collections_with_lexicons)
        }
      }

      process.send(client, Nil)
      actor.continue(db)
    }
    StartCustomBackfill(collections, client) -> {
      io.println("🔄 Starting custom backfill for specified collections...")
      run_custom_backfill_for_collections(db, collections)
      process.send(client, Nil)
      actor.continue(db)
    }
  }
}

/// CLI entry point: `import <dir>` and `backfill` subcommands, otherwise
/// start the HTTP server.
pub fn main() {
  // Check for CLI arguments
  case argv.load().arguments {
    ["import", directory] -> run_import_command(directory)
    ["backfill"] -> run_backfill_command()
    _ -> start_server_normally()
  }
}

/// `import <directory>` subcommand: load lexicon JSON files into the
/// database and print a summary (totals plus any per-file errors).
fn run_import_command(directory: String) {
  io.println("🔄 Importing lexicons from: " <> directory)
  io.println("")

  case importer.import_lexicons_from_directory(directory) {
    Ok(stats) -> {
      io.println("")
      io.println("✅ Import complete!")
      io.println(" Total files: " <> int.to_string(stats.total))
      io.println(" Imported: " <> int.to_string(stats.imported))
      io.println(" Failed: " <> int.to_string(stats.failed))

      case stats.errors {
        [] -> Nil
        errors -> {
          io.println("")
          io.println("⚠️ Errors:")
          list.each(errors, fn(err) { io.println(" " <> err) })
        }
      }
    }
    Error(err) -> {
      io.println_error("❌ Import failed: " <> err)
    }
  }
}

/// `backfill` subcommand: backfill records for every collection that has a
/// record-type lexicon in the database. No repo or external-collection
/// filters are applied (empty lists).
fn run_backfill_command() {
  io.println("🔄 Starting backfill for record-type lexicon collections")
  io.println("")

  // Initialize the database
  let assert Ok(db) = database.initialize("atproto.db")

  // Get all record-type lexicons
  io.println("📚 Fetching record-type lexicons from database...")
  case database.get_record_type_lexicons(db) {
    Ok(lexicons) -> {
      case lexicons {
        [] -> {
          io.println("⚠️ No record-type lexicons found in database")
          io.println(
            " Hint: Run 'gleam run -- import priv/lexicons' to import lexicons first",
          )
        }
        _ -> {
          let collections = list.map(lexicons, fn(lex) { lex.id })
          io.println(
            "✓ Found "
            <> int.to_string(list.length(collections))
            <> " record-type collection(s):",
          )
          list.each(collections, fn(col) { io.println(" - " <> col) })

          io.println("")
          let config = backfill.default_config()
          backfill.backfill_collections([], collections, [], config, db)
        }
      }
    }
    Error(_) -> {
      io.println_error("❌ Failed to fetch lexicons from database")
    }
  }
}

/// Default path: load .env, open the database, auto-import lexicons from
/// priv/lexicons if present, start the Jetstream consumer (non-fatal on
/// failure), then block serving HTTP.
fn start_server_normally() {
  // Load environment variables from .env file
  let _ = dotenv_gleam.config()

  // Initialize the database
  let assert Ok(db) = database.initialize("atproto.db")

  // Auto-import lexicons from priv/lexicons if directory exists
  io.println("")
  io.println("🔍 Checking for lexicons in priv/lexicons...")
  case importer.import_lexicons_from_directory("priv/lexicons") {
    Ok(stats) -> {
      case stats.imported {
        0 -> io.println(" ℹ️ No lexicons found to import")
        _ -> {
          io.println(
            " ✓ Imported " <> int.to_string(stats.imported) <> " lexicon(s)",
          )
        }
      }
    }
    Error(_) -> {
      // Missing directory is not an error for server startup.
      io.println(" ℹ️ No priv/lexicons directory found, skipping import")
    }
  }

  // Start Jetstream consumer in background; the server still runs (without
  // real-time indexing) if the consumer fails to start.
  case jetstream_consumer.start(db) {
    Ok(_) -> Nil
    Error(err) -> {
      io.println_error("❌ Failed to start Jetstream consumer: " <> err)
      io.println(" Server will continue without real-time indexing")
    }
  }

  io.println("")
  io.println("=== ATProto Gleam ===")
  io.println("")

  // Start server immediately (this blocks)
  start_server(db)
}

/// Configure wisp + mist and serve on port 8000; blocks forever.
/// AIP_BASE_URL overrides the default auth server URL.
fn start_server(db: sqlight.Connection) {
  wisp.configure_logger()
  // NOTE(review): secret_key_base is regenerated on every boot, so anything
  // signed with it (e.g. cookies) is invalidated by a restart — confirm
  // this is intended.
  let secret_key_base = wisp.random_string(64)

  // Get auth_base_url from environment variable or use default
  let auth_base_url = case envoy.get("AIP_BASE_URL") {
    Ok(url) -> url
    Error(_) -> "https://tunnel.chadtmiller.com"
  }

  io.println("🔐 Using AIP server: " <> auth_base_url)

  let ctx = Context(db: db, auth_base_url: auth_base_url)

  let handler = fn(req) { handle_request(req, ctx) }

  let assert Ok(_) =
    wisp_mist.handler(handler, secret_key_base)
    |> mist.new
    |> mist.port(8000)
    |> mist.start

  io.println("Server started on http://localhost:8000")
  process.sleep_forever()
}

/// Debug consumer that logs every Jetstream event to stdout.
/// NOTE(review): not referenced anywhere visible in this module (startup
/// uses jetstream_consumer.start instead) — appears to be dead example
/// code; confirm before removing. Also note it points at us-west while the
/// real consumer uses us-east.
fn start_jetstream(db: sqlight.Connection) {
  // Create a configuration for Jetstream
  // Listen to commit events only (posts, likes, reposts, follows)
  let config =
    jetstream.JetstreamConfig(
      endpoint: "wss://jetstream2.us-west.bsky.network/subscribe",
      wanted_collections: [],
      wanted_dids: [],
    )

  // Start the consumer with an event handler that identifies commit events
  jetstream.start_consumer(config, fn(event_json) {
    case jetstream.parse_event(event_json) {
      jetstream.CommitEvent(did, _time_us, commit) -> {
        io.println("✨ COMMIT EVENT")
        io.println(" DID: " <> did)
        io.println(" Operation: " <> commit.operation)
        io.println(" Collection: " <> commit.collection)
        io.println(" Record key: " <> commit.rkey)
        io.println(" Revision: " <> commit.rev)
        io.println("---")
      }
      jetstream.IdentityEvent(did, _time_us, _identity) -> {
        io.println("👤 IDENTITY EVENT: " <> did)
        io.println("---")
      }
      jetstream.AccountEvent(did, _time_us, _account) -> {
        io.println("🔐 ACCOUNT EVENT: " <> did)
        io.println("---")
      }
      jetstream.UnknownEvent(_raw) -> {
        // Ignore unknown events
        Nil
      }
    }
  })
}

/// Top-level router: "/" stats page, "/graphql", "/graphiql", and
/// "/xrpc/<nsid>.<method>" endpoints. XRPC requests are validated against
/// stored lexicons before dispatch; unknown paths get 404.
fn handle_request(req: wisp.Request, ctx: Context) -> wisp.Response {
  use _req <- middleware(req)

  let segments = wisp.path_segments(req)

  case segments {
    [] -> index_route(ctx)
    ["graphql"] -> graphql_handler.handle_graphql_request(req, ctx.db)
    ["graphiql"] -> graphiql_handler.handle_graphiql_request(req)
    ["xrpc", _] -> {
      // Try to parse the XRPC route
      case xrpc_router.parse_xrpc_path(segments) {
        option.Some(route) -> {
          // Check if lexicon exists for this NSID
          case xrpc_router.validate_nsid(ctx.db, route.nsid) {
            True -> {
              // Route to the appropriate handler based on method
              case xrpc_router.parse_method(route.method) {
                xrpc_router.CreateRecord ->
                  xrpc_handlers.handle_create_record(
                    req,
                    ctx.db,
                    route.nsid,
                    ctx.auth_base_url,
                  )
                xrpc_router.UpdateRecord ->
                  xrpc_handlers.handle_update_record(req, ctx.db, route.nsid)
                xrpc_router.DeleteRecord ->
                  xrpc_handlers.handle_delete_record(req, ctx.db, route.nsid)
                xrpc_router.GetRecord ->
                  xrpc_handlers.handle_get_record(req, ctx.db, route.nsid)
                xrpc_router.UnknownMethod -> {
                  wisp.response(404)
                  |> wisp.set_header("content-type", "application/json")
                  |> wisp.set_body(wisp.Text(
                    "{\"error\": \"MethodNotSupported\", \"message\": \"Unknown XRPC method: "
                    <> route.method
                    <> "\"}",
                  ))
                }
              }
            }
            False -> {
              // No lexicon found for this NSID
              wisp.response(404)
              |> wisp.set_header("content-type", "application/json")
              |> wisp.set_body(wisp.Text(
                "{\"error\": \"LexiconNotFound\", \"message\": \"No lexicon found for collection: "
                <> route.nsid
                <> "\"}",
              ))
            }
          }
        }
        option.None -> {
          // Invalid XRPC path format
          wisp.response(400)
          |> wisp.set_header("content-type", "application/json")
          |> wisp.set_body(wisp.Text(
            "{\"error\": \"InvalidRequest\", \"message\": \"Invalid XRPC path format\"}",
          ))
        }
      }
    }
    _ -> wisp.html_response("<h1>Not Found</h1>", 404)
  }
}

/// Stats landing page: lexicon/actor counts plus one table row per
/// collection (record counts from the DB, and zero-count rows for lexicons
/// that have no records yet). All DB errors degrade to empty/zero values
/// so the page always renders.
fn index_route(ctx: Context) -> wisp.Response {
  // Query database stats
  let collection_stats = case database.get_collection_stats(ctx.db) {
    Ok(stats) -> stats
    Error(_) -> []
  }

  let actor_count = case database.get_actor_count(ctx.db) {
    Ok(count) -> count
    Error(_) -> 0
  }

  let lexicon_count = case database.get_lexicon_count(ctx.db) {
    Ok(count) -> count
    Error(_) -> 0
  }

  // Get record-type lexicons (collections)
  let record_lexicons = case database.get_record_type_lexicons(ctx.db) {
    Ok(lexicons) -> lexicons
    Error(_) -> []
  }

  // Build collection rows from actual records
  let record_rows =
    collection_stats
    |> list.map(fn(stat) {
      html.tr([attribute.class("hover:bg-gray-50 transition-colors")], [
        html.td([attribute.class("px-4 py-3 text-sm text-gray-900")], [
          element.text(stat.collection),
        ]),
        html.td([attribute.class("px-4 py-3 text-sm text-gray-700")], [
          element.text(int.to_string(stat.count)),
        ]),
      ])
    })

  // Build lexicon rows (show lexicons that don't have records yet)
  let lexicon_rows =
    record_lexicons
    |> list.filter(fn(lexicon) {
      // Only show lexicons that don't already appear in collection_stats
      !list.any(collection_stats, fn(stat) { stat.collection == lexicon.id })
    })
    |> list.map(fn(lexicon) {
      html.tr([attribute.class("hover:bg-gray-50 transition-colors")], [
        html.td([attribute.class("px-4 py-3 text-sm text-gray-900")], [
          element.text(lexicon.id),
        ]),
        html.td([attribute.class("px-4 py-3 text-sm text-gray-500 italic")], [
          element.text("0"),
        ]),
      ])
    })

  // Combine both types of rows
  let collection_rows = list.append(record_rows, lexicon_rows)

  let page =
    html.html([attribute.class("h-full")], [
      html.head([], [
        html.title([], "ATProto Database Stats"),
        element.element("meta", [attribute.attribute("charset", "UTF-8")], []),
        element.element(
          "meta",
          [
            attribute.attribute("name", "viewport"),
            attribute.attribute(
              "content",
              "width=device-width, initial-scale=1.0",
            ),
          ],
          [],
        ),
        // Tailwind is pulled from a CDN at page load.
        element.element(
          "script",
          [attribute.attribute("src", "https://cdn.tailwindcss.com")],
          [],
        ),
      ]),
      html.body([attribute.class("bg-gray-50 min-h-screen p-8")], [
        html.div([attribute.class("max-w-4xl mx-auto")], [
          html.div([attribute.class("flex justify-between items-center mb-8")], [
            html.h1([attribute.class("text-4xl font-bold text-gray-900")], [
              element.text("quickslice"),
            ]),
            html.a(
              [
                attribute.href("/graphiql"),
                attribute.class(
                  "bg-purple-600 hover:bg-purple-700 text-white font-semibold py-2 px-4 rounded-lg transition-colors shadow-sm",
                ),
              ],
              [element.text("Open GraphiQL")],
            ),
          ]),
          // Lexicons section
          html.div([attribute.class("mb-8")], [
            html.h2(
              [attribute.class("text-2xl font-semibold text-gray-700 mb-4")],
              [element.text("Lexicons")],
            ),
            html.div(
              [
                attribute.class(
                  "bg-purple-50 rounded-lg p-6 border border-purple-100 shadow-sm",
                ),
              ],
              [
                html.div(
                  [attribute.class("text-4xl font-bold text-purple-600 mb-2")],
                  [element.text(int.to_string(lexicon_count))],
                ),
                html.div([attribute.class("text-gray-600")], [
                  element.text("Lexicon schemas loaded"),
                ]),
              ],
            ),
          ]),
          // Actors section
          html.div([attribute.class("mb-8")], [
            html.h2(
              [attribute.class("text-2xl font-semibold text-gray-700 mb-4")],
              [element.text("Actors")],
            ),
            html.div(
              [
                attribute.class(
                  "bg-blue-50 rounded-lg p-6 border border-blue-100 shadow-sm",
                ),
              ],
              [
                html.div(
                  [attribute.class("text-4xl font-bold text-blue-600 mb-2")],
                  [element.text(int.to_string(actor_count))],
                ),
                html.div([attribute.class("text-gray-600")], [
                  element.text("Total actors indexed"),
                ]),
              ],
            ),
          ]),
          // Collections section
          html.div([], [
            html.h2(
              [attribute.class("text-2xl font-semibold text-gray-700 mb-4")],
              [element.text("Collections")],
            ),
            html.div(
              [
                attribute.class(
                  "bg-white rounded-lg shadow-sm border border-gray-200 overflow-hidden",
                ),
              ],
              [
                html.table(
                  [attribute.class("min-w-full divide-y divide-gray-200")],
                  [
                    html.thead([attribute.class("bg-gray-50")], [
                      html.tr([], [
                        html.th(
                          [
                            attribute.class(
                              "px-4 py-3 text-left text-xs font-medium text-gray-500 uppercase tracking-wider",
                            ),
                          ],
                          [element.text("Collection")],
                        ),
                        html.th(
                          [
                            attribute.class(
                              "px-4 py-3 text-left text-xs font-medium text-gray-500 uppercase tracking-wider",
                            ),
                          ],
                          [element.text("Record Count")],
                        ),
                      ]),
                    ]),
                    html.tbody(
                      [attribute.class("bg-white divide-y divide-gray-200")],
                      collection_rows,
                    ),
                  ],
                ),
              ],
            ),
          ]),
        ]),
      ]),
    ])

  let html_string = element.to_document_string(page)
  wisp.html_response(html_string, 200)
}

/// Shared middleware chain: crash rescue, request logging, HEAD handling.
fn middleware(
  req: wisp.Request,
  handle_request: fn(wisp.Request) -> wisp.Response,
) -> wisp.Response {
  use <- wisp.rescue_crashes
  use <- wisp.log_request(req)
  use req <- wisp.handle_head(req)

  handle_request(req)
}

/// Backfills com.atproto.lexicon.schema collections on startup.
/// This function auto-discovers repositories from the relay that have lexicon schemas
/// and indexes them into the local database.
/// Note: Actor indexing is disabled for lexicon schemas.
fn backfill_lexicon_schemas(db: sqlight.Connection) {
  let repos = []
  let collections = ["com.atproto.lexicon.schema"]
  let external_collections = []
  let config =
    backfill.BackfillConfig(
      plc_directory_url: "https://plc.directory",
      index_actors: False,
      max_workers: 10,
    )

  backfill.backfill_collections(
    repos,
    collections,
    external_collections,
    config,
    db,
  )

  io.println("✅ Lexicon schema backfill complete")
}

/// Run a custom backfill for specific collections that have lexicons.
/// This backfills actual records (posts, follows, etc.) after verifying lexicons exist.
fn run_custom_backfill_for_collections(
  db: sqlight.Connection,
  collections: List(String),
) {
  let repos = []
  let external_collections = []
  let config = backfill.default_config()

  backfill.backfill_collections(
    repos,
    collections,
    external_collections,
    config,
    db,
  )

  io.println("✅ Custom backfill complete")
}
+354
server/src/xrpc_handlers.gleam
//// XRPC record handlers: create (proxied to the user's PDS with DPoP
//// auth), get/update/delete (served from the local index). All responses
//// are JSON built by string concatenation.
////
//// NOTE(review): several handlers splice caller-supplied values (e.g. the
//// `uri` query parameter) directly into JSON bodies without escaping — a
//// quote in the input produces invalid JSON. Consider a JSON encoder.

import atproto_auth
import database
import dpop
import gleam/bit_array
import gleam/http
import gleam/http/request
import gleam/list
import gleam/string
import lexicon
import sqlight
import wisp

/// Handle createRecord XRPC method with authentication.
/// POST only; requires an "Authorization: Bearer <token>" header, which is
/// exchanged for an ATProto session before proxying to the PDS.
pub fn handle_create_record(
  req: wisp.Request,
  db: sqlight.Connection,
  nsid: String,
  auth_base_url: String,
) -> wisp.Response {
  // Only accept POST requests
  case req.method {
    http.Post -> {
      // Step 1: Extract bearer token from Authorization header
      case request.get_header(req, "authorization") {
        Error(_) -> unauthorized_response()
        Ok(auth_header) -> {
          // Parse "Bearer {token}" format
          case string.starts_with(auth_header, "Bearer ") {
            False -> unauthorized_response()
            True -> {
              // Drop the 7-character "Bearer " prefix.
              let token = string.drop_start(auth_header, 7)
              handle_create_with_auth(req, db, nsid, token, auth_base_url)
            }
          }
        }
      }
    }
    _ -> {
      wisp.response(405)
      |> wisp.set_header("content-type", "application/json")
      |> wisp.set_body(wisp.Text(
        "{\"error\": \"MethodNotAllowed\", \"message\": \"Only POST is allowed\"}",
      ))
    }
  }
}

/// Authenticated half of createRecord: resolve the ATProto session from the
/// bearer token, then forward the request body verbatim to the user's PDS
/// com.atproto.repo.createRecord endpoint using a DPoP-signed request.
/// The PDS response (status + body) is relayed back to the caller.
/// Note: `_db` and `_nsid` are currently unused — the write goes straight
/// to the PDS; local indexing happens via Jetstream.
fn handle_create_with_auth(
  req: wisp.Request,
  _db: sqlight.Connection,
  _nsid: String,
  token: String,
  auth_base_url: String,
) -> wisp.Response {
  // Step 2: Get ATProto session (includes PDS URL, access token, and DPoP JWK)
  case atproto_auth.get_atproto_session(token, auth_base_url) {
    Error(atproto_auth.UnauthorizedToken) -> unauthorized_response()
    Error(_) -> internal_error_response("Failed to get ATProto session")
    Ok(session) -> {
      // Read the request body
      case wisp.read_body_bits(req) {
        Ok(body) -> {
          case bit_array.to_string(body) {
            Ok(body_string) -> {
              // Proxy to PDS with DPoP authentication
              let pds_url =
                session.pds_endpoint <> "/xrpc/com.atproto.repo.createRecord"

              case
                dpop.make_dpop_request("POST", pds_url, session, body_string)
              {
                Ok(pds_response) -> {
                  // Relay the PDS status and body unchanged.
                  wisp.response(pds_response.status)
                  |> wisp.set_header("content-type", "application/json")
                  |> wisp.set_body(wisp.Text(pds_response.body))
                }
                Error(_) -> {
                  internal_error_response("Failed to create record on PDS")
                }
              }
            }
            Error(_) -> {
              bad_request_response("Request body must be valid UTF-8")
            }
          }
        }
        Error(_) -> {
          bad_request_response("Failed to read request body")
        }
      }
    }
  }
}

/// Handle getRecord XRPC method.
/// GET only; requires a `uri` query parameter and serves the record from
/// the local database (not the PDS).
pub fn handle_get_record(
  req: wisp.Request,
  db: sqlight.Connection,
  _nsid: String,
) -> wisp.Response {
  // Only accept GET requests
  case req.method {
    http.Get -> {
      // Extract URI from query parameters
      let query_params = wisp.get_query(req)
      case list.key_find(query_params, "uri") {
        Ok(uri) -> {
          // Fetch the record from database
          case database.get_record(db, uri) {
            Ok([record]) -> {
              // Return the record; record.json is embedded as raw JSON.
              wisp.response(200)
              |> wisp.set_header("content-type", "application/json")
              |> wisp.set_body(wisp.Text(
                "{\"uri\": \""
                <> record.uri
                <> "\", \"cid\": \""
                <> record.cid
                <> "\", \"value\": "
                <> record.json
                <> "}",
              ))
            }
            Ok([]) -> {
              wisp.response(404)
              |> wisp.set_header("content-type", "application/json")
              |> wisp.set_body(wisp.Text(
                "{\"error\": \"RecordNotFound\", \"message\": \"Record not found: "
                <> uri
                <> "\"}",
              ))
            }
            Ok(_) -> {
              // Multiple records (unexpected, URI should be unique)
              wisp.response(500)
              |> wisp.set_header("content-type", "application/json")
              |> wisp.set_body(wisp.Text(
                "{\"error\": \"InternalError\", \"message\": \"Multiple records found\"}",
              ))
            }
            Error(_db_error) -> {
              wisp.response(500)
              |> wisp.set_header("content-type", "application/json")
              |> wisp.set_body(wisp.Text(
                "{\"error\": \"InternalError\", \"message\": \"Database error\"}",
              ))
            }
          }
        }
        Error(_) -> {
          wisp.response(400)
          |> wisp.set_header("content-type", "application/json")
          |> wisp.set_body(wisp.Text(
            "{\"error\": \"InvalidRequest\", \"message\": \"Missing required query parameter: uri\"}",
          ))
        }
      }
    }
    _ -> {
      wisp.response(405)
      |> wisp.set_header("content-type", "application/json")
      |> wisp.set_body(wisp.Text(
        "{\"error\": \"MethodNotAllowed\", \"message\": \"Only GET is allowed\"}",
      ))
    }
  }
}

/// Handle updateRecord XRPC method.
/// POST only (AT Protocol uses POST for updates); requires a `uri` query
/// parameter. Validates the body against the stored lexicon for `nsid`.
/// NOTE(review): validation succeeds but the record is never persisted
/// (see the TODO below) and the returned CID "bafyupdated" is a
/// placeholder — callers currently get a success response for a no-op.
pub fn handle_update_record(
  req: wisp.Request,
  db: sqlight.Connection,
  nsid: String,
) -> wisp.Response {
  // Only accept POST requests (AT Protocol uses POST for updates)
  case req.method {
    http.Post -> {
      // Extract URI from query parameters
      let query_params = wisp.get_query(req)
      case list.key_find(query_params, "uri") {
        Ok(uri) -> {
          // Read the new record data
          case wisp.read_body_bits(req) {
            Ok(body) -> {
              case bit_array.to_string(body) {
                Ok(body_string) -> {
                  // Get the lexicon for validation
                  case database.get_lexicon(db, nsid) {
                    Ok([lexicon_record]) -> {
                      // Validate the new record against the lexicon
                      case
                        lexicon.validate_record(
                          [lexicon_record.json],
                          nsid,
                          body_string,
                        )
                      {
                        Ok(_) -> {
                          // TODO: Update the record in the database
                          // This would require extracting the updated values
                          // and calling database.insert_record (which upserts)

                          wisp.response(200)
                          |> wisp.set_header("content-type", "application/json")
                          |> wisp.set_body(wisp.Text(
                            "{\"uri\": \""
                            <> uri
                            <> "\", \"cid\": \"bafyupdated\"}",
                          ))
                        }
                        Error(validation_error) -> {
                          wisp.response(400)
                          |> wisp.set_header("content-type", "application/json")
                          |> wisp.set_body(wisp.Text(
                            "{\"error\": \"InvalidRecord\", \"message\": \""
                            <> lexicon.describe_error(validation_error)
                            <> "\"}",
                          ))
                        }
                      }
                    }
                    Ok([]) -> {
                      wisp.response(404)
                      |> wisp.set_header("content-type", "application/json")
                      |> wisp.set_body(wisp.Text(
                        "{\"error\": \"LexiconNotFound\", \"message\": \"No lexicon found for collection\"}",
                      ))
                    }
                    Ok(_) -> {
                      wisp.response(500)
                      |> wisp.set_header("content-type", "application/json")
                      |> wisp.set_body(wisp.Text(
                        "{\"error\": \"InternalError\", \"message\": \"Multiple lexicons found\"}",
                      ))
                    }
                    Error(_db_error) -> {
                      wisp.response(500)
                      |> wisp.set_header("content-type", "application/json")
                      |> wisp.set_body(wisp.Text(
                        "{\"error\": \"InternalError\", \"message\": \"Failed to fetch lexicon\"}",
                      ))
                    }
                  }
                }
                Error(_) -> {
                  wisp.response(400)
                  |> wisp.set_header("content-type", "application/json")
                  |> wisp.set_body(wisp.Text(
                    "{\"error\": \"InvalidRequest\", \"message\": \"Request body must be valid UTF-8\"}",
                  ))
                }
              }
            }
            Error(_) -> {
              wisp.response(400)
              |> wisp.set_header("content-type", "application/json")
              |> wisp.set_body(wisp.Text(
                "{\"error\": \"InvalidRequest\", \"message\": \"Failed to read request body\"}",
              ))
            }
          }
        }
        Error(_) -> {
          wisp.response(400)
          |> wisp.set_header("content-type", "application/json")
          |> wisp.set_body(wisp.Text(
            "{\"error\": \"InvalidRequest\", \"message\": \"Missing required query parameter: uri\"}",
          ))
        }
      }
    }
    _ -> {
      wisp.response(405)
      |> wisp.set_header("content-type", "application/json")
      |> wisp.set_body(wisp.Text(
        "{\"error\": \"MethodNotAllowed\", \"message\": \"Only POST is allowed\"}",
      ))
    }
  }
}

/// Handle deleteRecord XRPC method.
/// POST only (AT Protocol uses POST for deletes); requires a `uri` query
/// parameter and deletes from the local database only (no PDS call).
pub fn handle_delete_record(
  req: wisp.Request,
  db: sqlight.Connection,
  _nsid: String,
) -> wisp.Response {
  // Only accept POST requests (AT Protocol uses POST for deletes)
  case req.method {
    http.Post -> {
      // Extract URI from query parameters
      let query_params = wisp.get_query(req)
      case list.key_find(query_params, "uri") {
        Ok(uri) -> {
          // Delete the record from the database
          case database.delete_record(db, uri) {
            Ok(_) -> {
              wisp.response(200)
              |> wisp.set_header("content-type", "application/json")
              |> wisp.set_body(wisp.Text("{\"success\": true}"))
            }
            Error(_db_error) -> {
              wisp.response(500)
              |> wisp.set_header("content-type", "application/json")
              |> wisp.set_body(wisp.Text(
                "{\"error\": \"InternalError\", \"message\": \"Failed to delete record\"}",
              ))
            }
          }
        }
        Error(_) -> {
          wisp.response(400)
          |> wisp.set_header("content-type", "application/json")
          |> wisp.set_body(wisp.Text(
            "{\"error\": \"InvalidRequest\", \"message\": \"Missing required query parameter: uri\"}",
          ))
        }
      }
    }
    _ -> {
      wisp.response(405)
      |> wisp.set_header("content-type", "application/json")
      |> wisp.set_body(wisp.Text(
        "{\"error\": \"MethodNotAllowed\", \"message\": \"Only POST is allowed\"}",
      ))
    }
  }
}

// Helper response functions

/// 401 JSON response for missing/invalid authorization.
fn unauthorized_response() -> wisp.Response {
  wisp.response(401)
  |> wisp.set_header("content-type", "application/json")
  |> wisp.set_body(wisp.Text(
    "{\"error\": \"AuthRequired\", \"message\": \"Valid authorization required\"}",
  ))
}

/// 400 JSON response; `message` is spliced in unescaped (see module note).
fn bad_request_response(message: String) -> wisp.Response {
  wisp.response(400)
  |> wisp.set_header("content-type", "application/json")
  |> wisp.set_body(wisp.Text(
    "{\"error\": \"BadRequest\", \"message\": \"" <> message <> "\"}",
  ))
}

/// 500 JSON response; `message` is spliced in unescaped (see module note).
fn internal_error_response(message: String) -> wisp.Response {
  wisp.response(500)
  |> wisp.set_header("content-type", "application/json")
  |> wisp.set_body(wisp.Text(
    "{\"error\": \"InternalError\", \"message\": \"" <> message <> "\"}",
  ))
}
+90
server/src/xrpc_router.gleam
import database
import gleam/list
import gleam/option.{type Option, None, Some}
import gleam/string
import sqlight

/// A parsed XRPC request: the collection NSID plus the method name.
pub type XrpcRoute {
  XrpcRoute(nsid: String, method: String)
}

/// XRPC method types this server understands.
pub type XrpcMethod {
  CreateRecord
  UpdateRecord
  DeleteRecord
  GetRecord
  UnknownMethod
}

/// Parse an XRPC path into NSID and method.
/// Expected format: ["xrpc", "nsid.method"]
/// Example: ["xrpc", "xyz.statusphere.status.createRecord"]
///
/// Returns None for any path that is not exactly ["xrpc", combined] or whose
/// combined segment does not contain a valid NSID followed by a method.
pub fn parse_xrpc_path(segments: List(String)) -> Option(XrpcRoute) {
  case segments {
    ["xrpc", combined] ->
      case split_nsid_and_method(combined) {
        Ok(#(nsid, method)) -> Some(XrpcRoute(nsid: nsid, method: method))
        Error(_) -> None
      }
    _ -> None
  }
}

/// Split a combined "nsid.method" string on its last dot.
/// Example: "xyz.statusphere.status.createRecord"
///   -> ("xyz.statusphere.status", "createRecord")
///
/// A valid NSID has at least three segments (authority.name.record), so the
/// combined string must have at least four dot-separated parts overall.
/// Shorter inputs (e.g. "foo.createRecord") are rejected instead of being
/// split into an invalid one- or two-segment NSID, which the previous
/// implementation allowed despite documenting the four-part minimum.
fn split_nsid_and_method(combined: String) -> Result(#(String, String), Nil) {
  let parts = string.split(combined, ".")
  case list.length(parts) >= 4 {
    False -> Error(Nil)
    True ->
      // The last part is the method; everything before it is the NSID.
      case list.reverse(parts) {
        [method, ..nsid_rev] ->
          Ok(#(string.join(list.reverse(nsid_rev), "."), method))
        // Unreachable: length >= 4 guarantees a non-empty list.
        [] -> Error(Nil)
      }
  }
}

/// Parse a method name into its XrpcMethod variant.
pub fn parse_method(method: String) -> XrpcMethod {
  case method {
    "createRecord" -> CreateRecord
    "updateRecord" -> UpdateRecord
    "deleteRecord" -> DeleteRecord
    "getRecord" -> GetRecord
    _ -> UnknownMethod
  }
}

/// Check whether a lexicon exists for the given NSID.
/// Any database error is treated as "no lexicon".
pub fn validate_nsid(db: sqlight.Connection, nsid: String) -> Bool {
  case database.has_lexicon_for_collection(db, nsid) {
    Ok(True) -> True
    _ -> False
  }
}
+562
server/test/graphql_handler_integration_test.gleam
/// Integration tests for GraphQL handler with database
///
/// These tests verify the full GraphQL query flow:
/// 1. Database setup with lexicons and records
/// 2. GraphQL schema building from database lexicons
/// 3. Query execution and result formatting
/// 4. JSON parsing and encoding throughout the pipeline
import database
import gleam/http
import gleam/int
import gleam/json
import gleam/list
import gleam/string
import gleeunit/should
import graphql_handler
import sqlight
import wisp
import wisp/simulate

// Helper to create a status lexicon: the full xyz.statusphere.status record
// schema with required status/createdAt fields and string constraints
// (single grapheme, max 32 bytes) on status.
fn create_status_lexicon() -> String {
  json.object([
    #("lexicon", json.int(1)),
    #("id", json.string("xyz.statusphere.status")),
    #(
      "defs",
      json.object([
        #(
          "main",
          json.object([
            #("type", json.string("record")),
            #("key", json.string("tid")),
            #(
              "record",
              json.object([
                #("type", json.string("object")),
                #(
                  "required",
                  // `of: fn(x) { x }` — elements are already json values.
                  json.array(
                    [json.string("status"), json.string("createdAt")],
                    of: fn(x) { x },
                  ),
                ),
                #(
                  "properties",
                  json.object([
                    #(
                      "status",
                      json.object([
                        #("type", json.string("string")),
                        #("minLength", json.int(1)),
                        #("maxGraphemes", json.int(1)),
                        #("maxLength", json.int(32)),
                      ]),
                    ),
                    #(
                      "createdAt",
                      json.object([
                        #("type", json.string("string")),
                        #("format", json.string("datetime")),
                      ]),
                    ),
                  ]),
                ),
              ]),
            ),
          ]),
        ),
      ]),
    ),
  ])
  |> json.to_string
}

// Helper to create a simple lexicon with just properties — deliberately omits
// "required", "key", and the record-level "type" to exercise minimal schemas.
fn create_simple_lexicon(nsid: String) -> String {
  json.object([
    #("lexicon", json.int(1)),
    #("id", json.string(nsid)),
    #(
      "defs",
      json.object([
        #(
          "main",
          json.object([
            #("type", json.string("record")),
            #(
              "record",
              json.object([
                #(
                  "properties",
                  json.object([
                    #("status", json.object([#("type", json.string("string"))])),
                  ]),
                ),
              ]),
            ),
          ]),
        ),
      ]),
    ),
  ])
  |> json.to_string
}

// Happy path: two records are inserted and a POST query selecting every
// field must surface both URIs and both status payloads in the response.
pub fn graphql_post_request_with_records_test() {
  // Create in-memory database
  let assert Ok(db) = sqlight.open(":memory:")
  let assert Ok(_) = database.create_lexicon_table(db)
  let assert Ok(_) = database.create_record_table(db)

  // Insert a lexicon for xyz.statusphere.status
  let lexicon = create_status_lexicon()
  let assert Ok(_) =
    database.insert_lexicon(db, "xyz.statusphere.status", lexicon)

  // Insert some test records (emoji statuses satisfy maxGraphemes: 1)
  let record1_json =
    json.object([
      #("status", json.string("🎉")),
      #("createdAt", json.string("2024-01-01T00:00:00Z")),
    ])
    |> json.to_string

  let assert Ok(_) =
    database.insert_record(
      db,
      "at://did:plc:test1/xyz.statusphere.status/123",
      "cid1",
      "did:plc:test1",
      "xyz.statusphere.status",
      record1_json,
    )

  let record2_json =
    json.object([
      #("status", json.string("🔥")),
      #("createdAt", json.string("2024-01-02T00:00:00Z")),
    ])
    |> json.to_string

  let assert Ok(_) =
    database.insert_record(
      db,
      "at://did:plc:test2/xyz.statusphere.status/456",
      "cid2",
      "did:plc:test2",
      "xyz.statusphere.status",
      record2_json,
    )

  // Create GraphQL query request
  let query =
    json.object([
      #(
        "query",
        json.string(
          "{ xyzStatusphereStatus { uri cid did collection status createdAt } }",
        ),
      ),
    ])
    |> json.to_string

  let request =
    simulate.request(http.Post, "/graphql")
    |> simulate.string_body(query)
    |> simulate.header("content-type", "application/json")

  let response = graphql_handler.handle_graphql_request(request, db)

  // Verify response
  response.status
  |> should.equal(200)

  // Get response body
  let assert wisp.Text(body) = response.body

  // Verify response contains data structure
  body
  |> should.not_equal("")

  // Response should contain "data"
  string.contains(body, "data")
  |> should.be_true

  // Response should contain field name
  string.contains(body, "xyzStatusphereStatus")
  |> should.be_true

  // Response should contain our test URIs
  string.contains(body, "at://did:plc:test1/xyz.statusphere.status/123")
  |> should.be_true

  string.contains(body, "at://did:plc:test2/xyz.statusphere.status/456")
  |> should.be_true

  // Response should contain our test data
  string.contains(body, "🎉")
  |> should.be_true

  string.contains(body, "🔥")
  |> should.be_true

  // Clean up
  let assert Ok(_) = sqlight.close(db)
}

// A known collection with zero records must still answer 200 with an
// empty list rather than an error.
pub fn graphql_post_request_empty_results_test() {
  // Create in-memory database
  let assert Ok(db) = sqlight.open(":memory:")
  let assert Ok(_) = database.create_lexicon_table(db)
  let assert Ok(_) = database.create_record_table(db)

  // Insert a lexicon but no records
  let lexicon = create_simple_lexicon("xyz.statusphere.status")
  let assert Ok(_) =
    database.insert_lexicon(db, "xyz.statusphere.status", lexicon)

  // Create GraphQL query request
  let query =
    json.object([#("query", json.string("{ xyzStatusphereStatus { uri } }"))])
    |> json.to_string

  let request =
    simulate.request(http.Post, "/graphql")
    |> simulate.string_body(query)
    |> simulate.header("content-type", "application/json")

  let response = graphql_handler.handle_graphql_request(request, db)

  // Verify response
  response.status
  |> should.equal(200)

  // Get response body
  let assert wisp.Text(body) = response.body

  // Should return empty array
  string.contains(body, "[]")
  |> should.be_true

  // Clean up
  let assert Ok(_) = sqlight.close(db)
}

// GET with the query in the URL's query string must also be accepted
// (the handler supports both POST bodies and GET parameters).
pub fn graphql_get_request_test() {
  // Create in-memory database
  let assert Ok(db) = sqlight.open(":memory:")
  let assert Ok(_) = database.create_lexicon_table(db)
  let assert Ok(_) = database.create_record_table(db)

  // Insert a lexicon
  let lexicon = create_simple_lexicon("xyz.statusphere.status")
  let assert Ok(_) =
    database.insert_lexicon(db, "xyz.statusphere.status", lexicon)

  // Create GraphQL GET request with query parameter
  // NOTE(review): the query string is passed unencoded (spaces, braces);
  // presumably simulate/wisp tolerate this — confirm against real clients
  // that percent-encode.
  let request =
    simulate.request(
      http.Get,
      "/graphql?query={ xyzStatusphereStatus { uri } }",
    )

  let response = graphql_handler.handle_graphql_request(request, db)

  // Verify response
  response.status
  |> should.equal(200)

  // Get response body
  let assert wisp.Text(body) = response.body

  // Should contain data
  string.contains(body, "data")
  |> should.be_true

  // Clean up
  let assert Ok(_) = sqlight.close(db)
}

// Malformed JSON in the POST body must yield 400 with an error payload.
pub fn graphql_invalid_json_request_test() {
  // Create in-memory database
  let assert Ok(db) = sqlight.open(":memory:")
  let assert Ok(_) = database.create_lexicon_table(db)

  // Create request with invalid JSON
  let request =
    simulate.request(http.Post, "/graphql")
    |> simulate.string_body("not valid json")
    |> simulate.header("content-type", "application/json")

  let response = graphql_handler.handle_graphql_request(request, db)

  // Should return 400 Bad Request
  response.status
  |> should.equal(400)

  // Get response body
  let assert wisp.Text(body) = response.body

  // Should contain error
  string.contains(body, "error")
  |> should.be_true

  // Clean up
  let assert Ok(_) = sqlight.close(db)
}

// Valid JSON that lacks the top-level "query" field must yield 400.
pub fn graphql_missing_query_field_test() {
  // Create in-memory database
  let assert Ok(db) = sqlight.open(":memory:")
  let assert Ok(_) = database.create_lexicon_table(db)

  // Create request with JSON but no query field
  let body_json =
    json.object([#("foo", json.string("bar"))])
    |> json.to_string

  let request =
    simulate.request(http.Post, "/graphql")
    |> simulate.string_body(body_json)
    |> simulate.header("content-type", "application/json")

  let response = graphql_handler.handle_graphql_request(request, db)

  // Should return 400 Bad Request
  response.status
  |> should.equal(400)

  // Get response body
  let assert wisp.Text(body) = response.body

  // Should contain error about missing query
  string.contains(body, "query")
  |> should.be_true

  // Clean up
  let assert Ok(_) = sqlight.close(db)
}

// Any verb other than GET/POST must be rejected with 405.
pub fn graphql_method_not_allowed_test() {
  // Create in-memory database
  let assert Ok(db) = sqlight.open(":memory:")

  // Create DELETE request (not allowed)
  let request = simulate.request(http.Delete, "/graphql")

  let response = graphql_handler.handle_graphql_request(request, db)

  // Should return 405 Method Not Allowed
  response.status
  |> should.equal(405)

  // Get response body
  let assert wisp.Text(body) = response.body

  // Should contain error
  string.contains(body, "MethodNotAllowed")
  |> should.be_true

  // Clean up
  let assert Ok(_) = sqlight.close(db)
}

// Two lexicons in one database must each be independently queryable under
// their own camelCased field name.
pub fn graphql_multiple_lexicons_test() {
  // Create in-memory database
  let assert Ok(db) = sqlight.open(":memory:")
  let assert Ok(_) = database.create_lexicon_table(db)
  let assert Ok(_) = database.create_record_table(db)

  // Insert multiple lexicons
  let lexicon1 = create_simple_lexicon("xyz.statusphere.status")
  let lexicon2 =
    json.object([
      #("lexicon", json.int(1)),
      #("id", json.string("app.bsky.feed.post")),
      #(
        "defs",
        json.object([
          #(
            "main",
            json.object([
              #("type", json.string("record")),
              #(
                "record",
                json.object([
                  #(
                    "properties",
                    json.object([
                      #("text", json.object([#("type", json.string("string"))])),
                      #(
                        "createdAt",
                        json.object([#("type", json.string("string"))]),
                      ),
                    ]),
                  ),
                ]),
              ),
            ]),
          ),
        ]),
      ),
    ])
    |> json.to_string

  let assert Ok(_) =
    database.insert_lexicon(db, "xyz.statusphere.status", lexicon1)
  let assert Ok(_) = database.insert_lexicon(db, "app.bsky.feed.post", lexicon2)

  // Insert records for first collection
  let record1_json =
    json.object([#("status", json.string("✨"))])
    |> json.to_string

  let assert Ok(_) =
    database.insert_record(
      db,
      "at://did:plc:test/xyz.statusphere.status/1",
      "cid1",
      "did:plc:test",
      "xyz.statusphere.status",
      record1_json,
    )

  // Query the first collection
  let query1 =
    json.object([#("query", json.string("{ xyzStatusphereStatus { uri } }"))])
    |> json.to_string
  let request1 =
    simulate.request(http.Post, "/graphql")
    |> simulate.string_body(query1)
    |> simulate.header("content-type", "application/json")

  let response1 = graphql_handler.handle_graphql_request(request1, db)

  response1.status
  |> should.equal(200)

  let assert wisp.Text(body1) = response1.body

  string.contains(body1, "xyzStatusphereStatus")
  |> should.be_true

  // Insert records for second collection
  let record2_json =
    json.object([
      #("text", json.string("Hello World")),
      #("createdAt", json.string("2024-01-01T00:00:00Z")),
    ])
    |> json.to_string

  let assert Ok(_) =
    database.insert_record(
      db,
      "at://did:plc:test/app.bsky.feed.post/1",
      "cid2",
      "did:plc:test",
      "app.bsky.feed.post",
      record2_json,
    )

  // Query the second collection
  let query2 =
    json.object([#("query", json.string("{ appBskyFeedPost { uri } }"))])
    |> json.to_string
  let request2 =
    simulate.request(http.Post, "/graphql")
    |> simulate.string_body(query2)
    |> simulate.header("content-type", "application/json")

  let response2 = graphql_handler.handle_graphql_request(request2, db)

  response2.status
  |> should.equal(200)

  let assert wisp.Text(body2) = response2.body

  string.contains(body2, "appBskyFeedPost")
  |> should.be_true

  // Clean up
  let assert Ok(_) = sqlight.close(db)
}

// With 150 stored records, the handler must cap the result set at 100.
pub fn graphql_record_limit_test() {
  // Create in-memory database
  let assert Ok(db) = sqlight.open(":memory:")
  let assert Ok(_) = database.create_lexicon_table(db)
  let assert Ok(_) = database.create_record_table(db)

  // Insert a lexicon
  let lexicon = create_simple_lexicon("xyz.statusphere.status")
  let assert Ok(_) =
    database.insert_lexicon(db, "xyz.statusphere.status", lexicon)

  // Insert 150 records (handler should limit to 100)
  let _ =
    list_range(1, 150)
    |> list.each(fn(i) {
      let uri = "at://did:plc:test/xyz.statusphere.status/" <> int.to_string(i)
      let cid = "cid" <> int.to_string(i)
      let json_data =
        json.object([#("status", json.string(int.to_string(i)))])
        |> json.to_string
      let assert Ok(_) =
        database.insert_record(
          db,
          uri,
          cid,
          "did:plc:test",
          "xyz.statusphere.status",
          json_data,
        )
      Nil
    })

  // Query all records
  let query =
    json.object([#("query", json.string("{ xyzStatusphereStatus { uri } }"))])
    |> json.to_string
  let request =
    simulate.request(http.Post, "/graphql")
    |> simulate.string_body(query)
    |> simulate.header("content-type", "application/json")

  let response = graphql_handler.handle_graphql_request(request, db)

  response.status
  |> should.equal(200)

  let assert wisp.Text(body) = response.body

  // Count how many URIs are in the response (should be exactly 100)
  let uri_count = count_occurrences(body, "\"uri\"")

  // Should return exactly 100 records (not all 150)
  uri_count
  |> should.equal(100)

  // Clean up
  let assert Ok(_) = sqlight.close(db)
}

// Helper function to create an inclusive range of integers [from, to].
// NOTE(review): gleam/list may already provide list.range — consider using it.
fn list_range(from: Int, to: Int) -> List(Int) {
  list_range_helper(from, to, [])
  |> list.reverse
}

// Tail-recursive accumulator: prepends each value, so the caller reverses.
fn list_range_helper(current: Int, to: Int, acc: List(Int)) -> List(Int) {
  case current > to {
    True -> acc
    False -> list_range_helper(current + 1, to, [current, ..acc])
  }
}

// Helper to count occurrences of a substring: splitting on the pattern
// yields n + 1 pieces for n matches, hence the `- 1`.
fn count_occurrences(text: String, pattern: String) -> Int {
  string.split(text, pattern)
  |> list.length
  |> fn(n) { n - 1 }
}
+43
server/test/jose_test.gleam
import gleam/option
import gleeunit
import gleeunit/should
import jose_wrapper

pub fn main() {
  gleeunit.main()
}

/// Generating a DPoP proof from a (fake) EC P-256 JWK must succeed and
/// produce a non-empty JWT, i.e. a string beginning with the base64url
/// JSON-header prefix "eyJ".
pub fn generate_dpop_proof_test() {
  // Fake test JWK (not a real key - for testing only)
  let jwk_json =
    "{\"kid\":\"did:key:zFAKEKEYFORTESTINGONLY123456789\",\"alg\":\"ES256\",\"use\":\"sig\",\"kty\":\"EC\",\"crv\":\"P-256\",\"x\":\"AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA\",\"y\":\"BBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBB\",\"d\":\"CCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCC\"}"

  let result =
    jose_wrapper.generate_dpop_proof_with_nonce(
      "POST",
      "https://leccinum.us-west.host.bsky.network/xrpc/com.atproto.repo.createRecord",
      "test_token_12345",
      jwk_json,
      option.None,
    )

  case result {
    Error(_err) -> should.fail()
    Ok(proof) -> {
      // A JWT is never empty…
      should.not_equal(proof, "")
      // …and its first segment is base64-encoded JSON, which starts "eyJ".
      let starts_with_jwt_header = case proof {
        "eyJ" <> _rest -> True
        _ -> False
      }
      should.be_true(starts_with_jwt_header)
    }
  }
}
+5
server/test/server_test.gleam
import gleeunit

/// Test entry point: gleeunit discovers and runs every `*_test` function
/// in this package's test modules.
pub fn main() -> Nil {
  gleeunit.main()
}
+368
server/test/xrpc_handlers_test.gleam
··· 1 + import database 2 + import gleam/http 3 + import gleeunit/should 4 + import sqlight 5 + import wisp/simulate 6 + import xrpc_handlers 7 + 8 + // Test configuration 9 + const test_auth_base_url = "https://auth.test.example.com" 10 + 11 + // Helper function to create a test database 12 + fn setup_test_db() -> sqlight.Connection { 13 + let assert Ok(db) = sqlight.open(":memory:") 14 + 15 + // Create tables 16 + let assert Ok(_) = database.create_record_table(db) 17 + let assert Ok(_) = database.create_lexicon_table(db) 18 + 19 + db 20 + } 21 + 22 + // Helper to insert a test lexicon 23 + fn insert_test_lexicon(db: sqlight.Connection, nsid: String) -> Nil { 24 + let lexicon_json = "{ 25 + \"lexicon\": 1, 26 + \"id\": \"" <> nsid <> "\", 27 + \"defs\": { 28 + \"main\": { 29 + \"type\": \"record\", 30 + \"key\": \"tid\", 31 + \"record\": { 32 + \"type\": \"object\", 33 + \"required\": [\"text\", \"createdAt\"], 34 + \"properties\": { 35 + \"text\": { 36 + \"type\": \"string\", 37 + \"minLength\": 1, 38 + \"maxLength\": 300 39 + }, 40 + \"createdAt\": { 41 + \"type\": \"string\", 42 + \"format\": \"datetime\" 43 + } 44 + } 45 + } 46 + } 47 + } 48 + }" 49 + 50 + let assert Ok(_) = database.insert_lexicon(db, nsid, lexicon_json) 51 + Nil 52 + } 53 + 54 + // Helper to insert a test record 55 + fn insert_test_record( 56 + db: sqlight.Connection, 57 + uri: String, 58 + collection: String, 59 + ) -> Nil { 60 + let record_json = 61 + "{\"text\": \"Hello world\", \"createdAt\": \"2025-01-01T00:00:00Z\"}" 62 + 63 + let assert Ok(_) = 64 + database.insert_record( 65 + db, 66 + uri, 67 + "bafytest123", 68 + "did:plc:test123", 69 + collection, 70 + record_json, 71 + ) 72 + Nil 73 + } 74 + 75 + // Tests for createRecord 76 + 77 + pub fn create_record_with_valid_data_test() { 78 + let db = setup_test_db() 79 + let nsid = "xyz.statusphere.status" 80 + insert_test_lexicon(db, nsid) 81 + 82 + let body = 83 + "{\"text\": \"Test status\", \"createdAt\": \"2025-01-01T12:00:00Z\"}" 84 
+ 85 + let request = 86 + simulate.request(http.Post, "/xrpc/" <> nsid <> ".createRecord") 87 + |> simulate.string_body(body) 88 + |> simulate.header("content-type", "application/json") 89 + 90 + let response = 91 + xrpc_handlers.handle_create_record(request, db, nsid, test_auth_base_url) 92 + 93 + // Now requires authentication, should return 401 without auth header 94 + response.status 95 + |> should.equal(401) 96 + } 97 + 98 + pub fn create_record_with_invalid_json_test() { 99 + let db = setup_test_db() 100 + let nsid = "xyz.statusphere.status" 101 + insert_test_lexicon(db, nsid) 102 + 103 + let body = "{invalid json" 104 + 105 + let request = 106 + simulate.request(http.Post, "/xrpc/" <> nsid <> ".createRecord") 107 + |> simulate.string_body(body) 108 + |> simulate.header("content-type", "application/json") 109 + 110 + let response = 111 + xrpc_handlers.handle_create_record(request, db, nsid, test_auth_base_url) 112 + 113 + // Auth required first, so 401 instead of 400 114 + response.status 115 + |> should.equal(401) 116 + } 117 + 118 + pub fn create_record_with_missing_required_field_test() { 119 + let db = setup_test_db() 120 + let nsid = "xyz.statusphere.status" 121 + insert_test_lexicon(db, nsid) 122 + 123 + // Missing "createdAt" field 124 + let body = "{\"text\": \"Test status\"}" 125 + 126 + let request = 127 + simulate.request(http.Post, "/xrpc/" <> nsid <> ".createRecord") 128 + |> simulate.string_body(body) 129 + |> simulate.header("content-type", "application/json") 130 + 131 + let response = 132 + xrpc_handlers.handle_create_record(request, db, nsid, test_auth_base_url) 133 + 134 + // Auth required first, so 401 instead of 400 135 + response.status 136 + |> should.equal(401) 137 + } 138 + 139 + pub fn create_record_with_wrong_http_method_test() { 140 + let db = setup_test_db() 141 + let nsid = "xyz.statusphere.status" 142 + insert_test_lexicon(db, nsid) 143 + 144 + let request = simulate.request(http.Get, "/xrpc/" <> nsid <> ".createRecord") 145 + 
146 + let response = 147 + xrpc_handlers.handle_create_record(request, db, nsid, test_auth_base_url) 148 + 149 + // Method check happens before auth, still 405 150 + response.status 151 + |> should.equal(405) 152 + } 153 + 154 + pub fn create_record_without_lexicon_test() { 155 + let db = setup_test_db() 156 + let nsid = "xyz.nonexistent.collection" 157 + // Don't insert lexicon 158 + 159 + let body = "{\"text\": \"Test\", \"createdAt\": \"2025-01-01T12:00:00Z\"}" 160 + 161 + let request = 162 + simulate.request(http.Post, "/xrpc/" <> nsid <> ".createRecord") 163 + |> simulate.string_body(body) 164 + |> simulate.header("content-type", "application/json") 165 + 166 + let response = 167 + xrpc_handlers.handle_create_record(request, db, nsid, test_auth_base_url) 168 + 169 + // Auth required first, so 401 instead of 404 170 + response.status 171 + |> should.equal(401) 172 + } 173 + 174 + // Tests for getRecord 175 + 176 + pub fn get_record_success_test() { 177 + let db = setup_test_db() 178 + let nsid = "xyz.statusphere.status" 179 + let uri = "at://did:plc:test123/" <> nsid <> "/abc123" 180 + 181 + insert_test_lexicon(db, nsid) 182 + insert_test_record(db, uri, nsid) 183 + 184 + let request = 185 + simulate.request(http.Get, "/xrpc/" <> nsid <> ".getRecord?uri=" <> uri) 186 + 187 + let response = xrpc_handlers.handle_get_record(request, db, nsid) 188 + 189 + response.status 190 + |> should.equal(200) 191 + } 192 + 193 + pub fn get_record_not_found_test() { 194 + let db = setup_test_db() 195 + let nsid = "xyz.statusphere.status" 196 + let uri = "at://did:plc:test123/" <> nsid <> "/nonexistent" 197 + 198 + insert_test_lexicon(db, nsid) 199 + 200 + let request = 201 + simulate.request(http.Get, "/xrpc/" <> nsid <> ".getRecord?uri=" <> uri) 202 + 203 + let response = xrpc_handlers.handle_get_record(request, db, nsid) 204 + 205 + response.status 206 + |> should.equal(404) 207 + } 208 + 209 + pub fn get_record_missing_uri_param_test() { 210 + let db = setup_test_db() 211 + 
let nsid = "xyz.statusphere.status" 212 + 213 + insert_test_lexicon(db, nsid) 214 + 215 + let request = simulate.request(http.Get, "/xrpc/" <> nsid <> ".getRecord") 216 + 217 + let response = xrpc_handlers.handle_get_record(request, db, nsid) 218 + 219 + response.status 220 + |> should.equal(400) 221 + } 222 + 223 + pub fn get_record_wrong_http_method_test() { 224 + let db = setup_test_db() 225 + let nsid = "xyz.statusphere.status" 226 + let uri = "at://did:plc:test123/" <> nsid <> "/abc123" 227 + 228 + insert_test_lexicon(db, nsid) 229 + 230 + let request = 231 + simulate.request(http.Post, "/xrpc/" <> nsid <> ".getRecord?uri=" <> uri) 232 + 233 + let response = xrpc_handlers.handle_get_record(request, db, nsid) 234 + 235 + response.status 236 + |> should.equal(405) 237 + } 238 + 239 + // Tests for updateRecord 240 + 241 + pub fn update_record_with_valid_data_test() { 242 + let db = setup_test_db() 243 + let nsid = "xyz.statusphere.status" 244 + let uri = "at://did:plc:test123/" <> nsid <> "/abc123" 245 + 246 + insert_test_lexicon(db, nsid) 247 + insert_test_record(db, uri, nsid) 248 + 249 + let body = 250 + "{\"text\": \"Updated status\", \"createdAt\": \"2025-01-01T12:00:00Z\"}" 251 + 252 + let request = 253 + simulate.request(http.Post, "/xrpc/" <> nsid <> ".updateRecord?uri=" <> uri) 254 + |> simulate.string_body(body) 255 + |> simulate.header("content-type", "application/json") 256 + 257 + let response = xrpc_handlers.handle_update_record(request, db, nsid) 258 + 259 + response.status 260 + |> should.equal(200) 261 + } 262 + 263 + pub fn update_record_with_invalid_data_test() { 264 + let db = setup_test_db() 265 + let nsid = "xyz.statusphere.status" 266 + let uri = "at://did:plc:test123/" <> nsid <> "/abc123" 267 + 268 + insert_test_lexicon(db, nsid) 269 + insert_test_record(db, uri, nsid) 270 + 271 + // Missing required field 272 + let body = "{\"text\": \"Updated status\"}" 273 + 274 + let request = 275 + simulate.request(http.Post, "/xrpc/" <> nsid <> 
".updateRecord?uri=" <> uri) 276 + |> simulate.string_body(body) 277 + |> simulate.header("content-type", "application/json") 278 + 279 + let response = xrpc_handlers.handle_update_record(request, db, nsid) 280 + 281 + response.status 282 + |> should.equal(400) 283 + } 284 + 285 + pub fn update_record_missing_uri_param_test() { 286 + let db = setup_test_db() 287 + let nsid = "xyz.statusphere.status" 288 + 289 + insert_test_lexicon(db, nsid) 290 + 291 + let body = "{\"text\": \"Updated\", \"createdAt\": \"2025-01-01T12:00:00Z\"}" 292 + 293 + let request = 294 + simulate.request(http.Post, "/xrpc/" <> nsid <> ".updateRecord") 295 + |> simulate.string_body(body) 296 + |> simulate.header("content-type", "application/json") 297 + 298 + let response = xrpc_handlers.handle_update_record(request, db, nsid) 299 + 300 + response.status 301 + |> should.equal(400) 302 + } 303 + 304 + pub fn update_record_wrong_http_method_test() { 305 + let db = setup_test_db() 306 + let nsid = "xyz.statusphere.status" 307 + let uri = "at://did:plc:test123/" <> nsid <> "/abc123" 308 + 309 + insert_test_lexicon(db, nsid) 310 + 311 + let request = 312 + simulate.request(http.Get, "/xrpc/" <> nsid <> ".updateRecord?uri=" <> uri) 313 + 314 + let response = xrpc_handlers.handle_update_record(request, db, nsid) 315 + 316 + response.status 317 + |> should.equal(405) 318 + } 319 + 320 + // Tests for deleteRecord 321 + 322 + pub fn delete_record_success_test() { 323 + let db = setup_test_db() 324 + let nsid = "xyz.statusphere.status" 325 + let uri = "at://did:plc:test123/" <> nsid <> "/abc123" 326 + 327 + insert_test_lexicon(db, nsid) 328 + insert_test_record(db, uri, nsid) 329 + 330 + let request = 331 + simulate.request(http.Post, "/xrpc/" <> nsid <> ".deleteRecord?uri=" <> uri) 332 + 333 + let response = xrpc_handlers.handle_delete_record(request, db, nsid) 334 + 335 + response.status 336 + |> should.equal(200) 337 + } 338 + 339 + pub fn delete_record_missing_uri_param_test() { 340 + let db = 
setup_test_db() 341 + let nsid = "xyz.statusphere.status" 342 + 343 + insert_test_lexicon(db, nsid) 344 + 345 + let request = simulate.request(http.Post, "/xrpc/" <> nsid <> ".deleteRecord") 346 + 347 + let response = xrpc_handlers.handle_delete_record(request, db, nsid) 348 + 349 + response.status 350 + |> should.equal(400) 351 + } 352 + 353 + pub fn delete_record_wrong_http_method_test() { 354 + let db = setup_test_db() 355 + let nsid = "xyz.statusphere.status" 356 + let uri = "at://did:plc:test123/" <> nsid <> "/abc123" 357 + 358 + insert_test_lexicon(db, nsid) 359 + 360 + // Use GET instead of POST to test wrong method 361 + let request = 362 + simulate.request(http.Get, "/xrpc/" <> nsid <> ".deleteRecord?uri=" <> uri) 363 + 364 + let response = xrpc_handlers.handle_delete_record(request, db, nsid) 365 + 366 + response.status 367 + |> should.equal(405) 368 + }
+125
server/test/xrpc_router_test.gleam
import gleam/option
import gleeunit/should
import xrpc_router

// --- XRPC path parsing ---

pub fn parse_valid_xrpc_path_test() {
  ["xrpc", "xyz.statusphere.status.createRecord"]
  |> xrpc_router.parse_xrpc_path
  |> should.equal(
    option.Some(xrpc_router.XrpcRoute(
      nsid: "xyz.statusphere.status",
      method: "createRecord",
    )),
  )
}

pub fn parse_xrpc_path_with_longer_nsid_test() {
  // NSIDs may have more than three segments; only the final segment is
  // the method name.
  ["xrpc", "com.example.app.post.getRecord"]
  |> xrpc_router.parse_xrpc_path
  |> should.equal(
    option.Some(xrpc_router.XrpcRoute(
      nsid: "com.example.app.post",
      method: "getRecord",
    )),
  )
}

pub fn parse_xrpc_path_update_record_test() {
  ["xrpc", "xyz.statusphere.status.updateRecord"]
  |> xrpc_router.parse_xrpc_path
  |> should.equal(
    option.Some(xrpc_router.XrpcRoute(
      nsid: "xyz.statusphere.status",
      method: "updateRecord",
    )),
  )
}

pub fn parse_xrpc_path_delete_record_test() {
  ["xrpc", "xyz.statusphere.status.deleteRecord"]
  |> xrpc_router.parse_xrpc_path
  |> should.equal(
    option.Some(xrpc_router.XrpcRoute(
      nsid: "xyz.statusphere.status",
      method: "deleteRecord",
    )),
  )
}

pub fn parse_invalid_xrpc_path_too_short_test() {
  // A bare segment with no dots cannot be split into nsid + method.
  ["xrpc", "invalid"]
  |> xrpc_router.parse_xrpc_path
  |> should.equal(option.None)
}

pub fn parse_invalid_xrpc_path_wrong_prefix_test() {
  // Only paths rooted at /xrpc are routed.
  ["api", "xyz.statusphere.status.createRecord"]
  |> xrpc_router.parse_xrpc_path
  |> should.equal(option.None)
}

pub fn parse_invalid_xrpc_path_empty_test() {
  []
  |> xrpc_router.parse_xrpc_path
  |> should.equal(option.None)
}

// --- Method name parsing ---

pub fn parse_method_create_record_test() {
  xrpc_router.parse_method("createRecord")
  |> should.equal(xrpc_router.CreateRecord)
}

pub fn parse_method_update_record_test() {
  xrpc_router.parse_method("updateRecord")
  |> should.equal(xrpc_router.UpdateRecord)
}

pub fn parse_method_delete_record_test() {
  xrpc_router.parse_method("deleteRecord")
  |> should.equal(xrpc_router.DeleteRecord)
}

pub fn parse_method_get_record_test() {
  xrpc_router.parse_method("getRecord")
  |> should.equal(xrpc_router.GetRecord)
}

pub fn parse_method_unknown_test() {
  // Any unrecognised name maps to the UnknownMethod variant.
  xrpc_router.parse_method("unknownMethod")
  |> should.equal(xrpc_router.UnknownMethod)
}

pub fn parse_method_empty_test() {
  xrpc_router.parse_method("")
  |> should.equal(xrpc_router.UnknownMethod)
}