···11+# GraphQL
22+33+A GraphQL implementation in Gleam providing query parsing, execution, and introspection support.
44+55+## Features
66+77+### Core GraphQL Functionality
88+- **Query Parsing**: GraphQL query language support including:
99+ - Field selection
1010+ - Arguments
1111+ - Aliases
1212+ - Fragments (inline and named)
1313+1414+- **Schema Definition**: Type-safe schema builder with:
1515+ - Object types
1616+ - Scalar types (String, Int, Float, Boolean, ID)
1717+ - List types
1818+ - Non-null types
1919+ - Field resolvers with context-based data access
2020+2121+- **Query Execution**: Execution engine with:
2222+ - Recursive field resolution
2323+ - Nested object support
2424+ - List handling with proper field filtering
2525+ - Fragment spreading and inline fragments
2626+ - Error collection and reporting
2727+ - Path tracking for error context
2828+2929+- **Introspection**: GraphQL introspection support
3030+ - Schema introspection queries
3131+ - Type introspection
3232+ - Field introspection
3333+ - Compatible with GraphiQL and other GraphQL clients
3434+3535+## Architecture
3636+3737+The package is organized into several modules:
3838+3939+- `graphql/lexer.gleam` - Tokenizes GraphQL query strings
4040+- `graphql/parser.gleam` - Parses tokens into an AST
4141+- `graphql/schema.gleam` - Schema definition and type system
4242+- `graphql/executor.gleam` - Query execution engine
4343+- `graphql/value.gleam` - GraphQL value types
4444+- `graphql/introspection.gleam` - Schema introspection
4545+4646+## Usage
4747+4848+### Defining a Schema
4949+5050+```gleam
5151+import graphql/schema
5252+import graphql/value
5353+5454+// Define a simple User type
5555+let user_type = schema.object_type(
5656+ "User",
5757+ "A user in the system",
5858+ [
5959+ schema.field("id", schema.id_type(), "User ID", fn(ctx) {
6060+ // Extract id from context
6161+ case ctx.data {
6262+ option.Some(value.Object(fields)) -> {
6363+ case list.key_find(fields, "id") {
6464+ Ok(id_val) -> Ok(id_val)
6565+ Error(_) -> Ok(value.Null)
6666+ }
6767+ }
6868+ _ -> Ok(value.Null)
6969+ }
7070+ }),
7171+ schema.field("name", schema.string_type(), "User name", fn(ctx) {
7272+ // Extract name from context
7373+ // ... resolver implementation
7474+ }),
7575+ ]
7676+)
7777+7878+// Define root query type
7979+let query_type = schema.object_type(
8080+ "Query",
8181+ "Root query type",
8282+ [
8383+ schema.field("user", user_type, "Get a user", fn(_ctx) {
8484+ Ok(value.Object([
8585+ #("id", value.String("1")),
8686+ #("name", value.String("Alice")),
8787+ ]))
8888+ }),
8989+ ]
9090+)
9191+9292+// Create schema
let my_schema = schema.schema(query_type, option.None)
9494+```
9595+9696+### Executing Queries
9797+9898+```gleam
9999+import graphql/executor
100100+import graphql/schema
101101+102102+let query = "{ user { id name } }"
103103+let result = executor.execute(query, my_schema, schema.Context(None))
104104+105105+case result {
106106+ Ok(executor.Response(data: data, errors: [])) -> {
107107+ // Query succeeded
108108+ io.println("Data: " <> string.inspect(data))
109109+ }
110110+ Ok(executor.Response(data: data, errors: errors)) -> {
111111+ // Query executed with errors
112112+ io.println("Data: " <> string.inspect(data))
113113+ io.println("Errors: " <> string.inspect(errors))
114114+ }
115115+ Error(err) -> {
116116+ // Query failed to parse or execute
117117+ io.println("Error: " <> err)
118118+ }
119119+}
120120+```
121121+122122+## Test Coverage
123123+124124+The package includes tests covering:
125125+- Parsing
126126+- Execution
127127+- Schema
128128+- Introspection
129129+- Edge cases
130130+131131+## Known Limitations
132132+133133+- Mutations not yet implemented
134134+- Subscriptions not yet implemented
135135+- Directives not yet implemented
136136+- Variables not yet implemented
137137+- Custom scalar types limited to built-in types
138138+139139+## Dependencies
- `gleam_stdlib` >= 0.44.0 and < 2.0.0
142142+143143+## Development
144144+145145+Run tests:
146146+```sh
147147+cd graphql
148148+gleam test
149149+```
150150+151151+Build:
152152+```sh
153153+gleam build
154154+```
+19
graphql/gleam.toml
···11+name = "graphql"
22+version = "1.0.0"
33+44+# Fill out these fields if you intend to generate HTML documentation or publish
55+# your project to the Hex package manager.
66+#
77+# description = ""
88+# licences = ["Apache-2.0"]
99+# repository = { type = "github", user = "", repo = "" }
1010+# links = [{ title = "Website", href = "" }]
1111+#
1212+# For a full reference of all the available options, you can have a look at
1313+# https://gleam.run/writing-gleam/gleam-toml/.
1414+1515+[dependencies]
1616+gleam_stdlib = ">= 0.44.0 and < 2.0.0"
1717+1818+[dev-dependencies]
1919+gleeunit = ">= 1.0.0 and < 2.0.0"
+11
graphql/manifest.toml
···11+# This file was generated by Gleam
22+# You typically do not need to edit this file
33+44+packages = [
55+ { name = "gleam_stdlib", version = "0.65.0", build_tools = ["gleam"], requirements = [], otp_app = "gleam_stdlib", source = "hex", outer_checksum = "7C69C71D8C493AE11A5184828A77110EB05A7786EBF8B25B36A72F879C3EE107" },
66+ { name = "gleeunit", version = "1.7.0", build_tools = ["gleam"], requirements = ["gleam_stdlib"], otp_app = "gleeunit", source = "hex", outer_checksum = "CD701726CBCE5588B375D157B4391CFD0F2F134CD12D9B6998A395484DE05C58" },
77+]
88+99+[requirements]
1010+gleam_stdlib = { version = ">= 0.44.0 and < 2.0.0" }
1111+gleeunit = { version = ">= 1.0.0 and < 2.0.0" }
+620
graphql/src/graphql/executor.gleam
···11+/// GraphQL Executor
22+///
33+/// Executes GraphQL queries against a schema
44+import gleam/dict.{type Dict}
55+import gleam/list
66+import gleam/option.{None, Some}
77+import graphql/introspection
88+import graphql/parser
99+import graphql/schema
1010+import graphql/value
1111+1212+/// GraphQL Error
1313+pub type GraphQLError {
1414+ GraphQLError(message: String, path: List(String))
1515+}
1616+1717+/// GraphQL Response
1818+pub type Response {
1919+ Response(data: value.Value, errors: List(GraphQLError))
2020+}
2121+2222+/// Execute a GraphQL query
2323+pub fn execute(
2424+ query: String,
2525+ graphql_schema: schema.Schema,
2626+ ctx: schema.Context,
2727+) -> Result(Response, String) {
2828+ // Parse the query
2929+ case parser.parse(query) {
3030+ Error(parse_error) ->
3131+ Error("Parse error: " <> format_parse_error(parse_error))
3232+ Ok(document) -> {
3333+ // Execute the document
3434+ case execute_document(document, graphql_schema, ctx) {
3535+ Ok(#(data, errors)) -> Ok(Response(data, errors))
3636+ Error(err) -> Error(err)
3737+ }
3838+ }
3939+ }
4040+}
4141+4242+fn format_parse_error(err: parser.ParseError) -> String {
4343+ case err {
4444+ parser.UnexpectedToken(_, msg) -> msg
4545+ parser.UnexpectedEndOfInput(msg) -> msg
4646+ parser.LexerError(_) -> "Lexer error"
4747+ }
4848+}
4949+5050+/// Execute a document
5151+fn execute_document(
5252+ document: parser.Document,
5353+ graphql_schema: schema.Schema,
5454+ ctx: schema.Context,
5555+) -> Result(#(value.Value, List(GraphQLError)), String) {
5656+ case document {
5757+ parser.Document(operations) -> {
5858+ // Separate fragments from executable operations
5959+ let #(fragments, executable_ops) = partition_operations(operations)
6060+6161+ // Build fragments dictionary
6262+ let fragments_dict = build_fragments_dict(fragments)
6363+6464+ // Execute the first executable operation
6565+ case executable_ops {
6666+ [operation, ..] ->
6767+ execute_operation(operation, graphql_schema, ctx, fragments_dict)
6868+ [] -> Error("No executable operations in document")
6969+ }
7070+ }
7171+ }
7272+}
7373+7474+/// Partition operations into fragments and executable operations
7575+fn partition_operations(
7676+ operations: List(parser.Operation),
7777+) -> #(List(parser.Operation), List(parser.Operation)) {
7878+ list.partition(operations, fn(op) {
7979+ case op {
8080+ parser.FragmentDefinition(_, _, _) -> True
8181+ _ -> False
8282+ }
8383+ })
8484+}
8585+8686+/// Build a dictionary of fragments keyed by name
8787+fn build_fragments_dict(
8888+ fragments: List(parser.Operation),
8989+) -> Dict(String, parser.Operation) {
9090+ fragments
9191+ |> list.filter_map(fn(frag) {
9292+ case frag {
9393+ parser.FragmentDefinition(name, _, _) -> Ok(#(name, frag))
9494+ _ -> Error(Nil)
9595+ }
9696+ })
9797+ |> dict.from_list
9898+}
9999+100100+/// Execute an operation
101101+fn execute_operation(
102102+ operation: parser.Operation,
103103+ graphql_schema: schema.Schema,
104104+ ctx: schema.Context,
105105+ fragments: Dict(String, parser.Operation),
106106+) -> Result(#(value.Value, List(GraphQLError)), String) {
107107+ let root_type = schema.query_type(graphql_schema)
108108+109109+ case operation {
110110+ parser.Query(selection_set) ->
111111+ execute_selection_set(
112112+ selection_set,
113113+ root_type,
114114+ graphql_schema,
115115+ ctx,
116116+ fragments,
117117+ [],
118118+ )
119119+ parser.NamedQuery(_, _, selection_set) ->
120120+ execute_selection_set(
121121+ selection_set,
122122+ root_type,
123123+ graphql_schema,
124124+ ctx,
125125+ fragments,
126126+ [],
127127+ )
128128+ parser.Mutation(_) -> Error("Mutations not yet implemented")
129129+ parser.NamedMutation(_, _, _) -> Error("Mutations not yet implemented")
130130+ parser.FragmentDefinition(_, _, _) ->
131131+ Error("Fragment definitions are not executable operations")
132132+ }
133133+}
134134+135135+/// Execute a selection set
136136+fn execute_selection_set(
137137+ selection_set: parser.SelectionSet,
138138+ parent_type: schema.Type,
139139+ graphql_schema: schema.Schema,
140140+ ctx: schema.Context,
141141+ fragments: Dict(String, parser.Operation),
142142+ path: List(String),
143143+) -> Result(#(value.Value, List(GraphQLError)), String) {
144144+ case selection_set {
145145+ parser.SelectionSet(selections) -> {
146146+ let results =
147147+ list.map(selections, fn(selection) {
148148+ execute_selection(
149149+ selection,
150150+ parent_type,
151151+ graphql_schema,
152152+ ctx,
153153+ fragments,
154154+ path,
155155+ )
156156+ })
157157+158158+ // Collect all data and errors, merging fragment fields
159159+ let #(data, errors) = collect_and_merge_fields(results)
160160+161161+ Ok(#(value.Object(data), errors))
162162+ }
163163+ }
164164+}
165165+166166+/// Collect and merge fields from selection results, handling fragment fields
167167+fn collect_and_merge_fields(
168168+ results: List(Result(#(String, value.Value, List(GraphQLError)), String)),
169169+) -> #(List(#(String, value.Value)), List(GraphQLError)) {
170170+ let #(data, errors) =
171171+ results
172172+ |> list.fold(#([], []), fn(acc, r) {
173173+ let #(fields_acc, errors_acc) = acc
174174+ case r {
175175+ Ok(#("__fragment_fields", value.Object(fragment_fields), errs)) -> {
176176+ // Merge fragment fields into parent
177177+ #(
178178+ list.append(fields_acc, fragment_fields),
179179+ list.append(errors_acc, errs),
180180+ )
181181+ }
182182+ Ok(#("__fragment_skip", _, _errs)) -> {
183183+ // Skip fragment that didn't match type condition
184184+ acc
185185+ }
186186+ Ok(#(name, val, errs)) -> {
187187+ // Regular field
188188+ #(
189189+ list.append(fields_acc, [#(name, val)]),
190190+ list.append(errors_acc, errs),
191191+ )
192192+ }
193193+ Error(_) -> acc
194194+ }
195195+ })
196196+197197+ #(data, errors)
198198+}
199199+200200+/// Execute a selection
201201+fn execute_selection(
202202+ selection: parser.Selection,
203203+ parent_type: schema.Type,
204204+ graphql_schema: schema.Schema,
205205+ ctx: schema.Context,
206206+ fragments: Dict(String, parser.Operation),
207207+ path: List(String),
208208+) -> Result(#(String, value.Value, List(GraphQLError)), String) {
209209+ case selection {
210210+ parser.FragmentSpread(name) -> {
211211+ // Look up the fragment definition
212212+ case dict.get(fragments, name) {
213213+ Error(_) -> Error("Fragment '" <> name <> "' not found")
214214+ Ok(parser.FragmentDefinition(
215215+ _fname,
216216+ type_condition,
217217+ fragment_selection_set,
218218+ )) -> {
219219+ // Check type condition
220220+ let current_type_name = schema.type_name(parent_type)
221221+ case type_condition == current_type_name {
222222+ False -> {
223223+ // Type condition doesn't match, skip this fragment
224224+ // Return empty object as a placeholder that will be filtered out
225225+ Ok(#("__fragment_skip", value.Null, []))
226226+ }
227227+ True -> {
228228+ // Type condition matches, execute fragment's selections
229229+ case
230230+ execute_selection_set(
231231+ fragment_selection_set,
232232+ parent_type,
233233+ graphql_schema,
234234+ ctx,
235235+ fragments,
236236+ path,
237237+ )
238238+ {
239239+ Ok(#(value.Object(fields), errs)) -> {
240240+ // Fragment selections should be merged into parent
241241+ // For now, return as a special marker
242242+ Ok(#("__fragment_fields", value.Object(fields), errs))
243243+ }
244244+ Ok(#(val, errs)) -> Ok(#("__fragment_fields", val, errs))
245245+ Error(err) -> Error(err)
246246+ }
247247+ }
248248+ }
249249+ }
250250+ Ok(_) -> Error("Invalid fragment definition")
251251+ }
252252+ }
253253+ parser.InlineFragment(type_condition_opt, inline_selections) -> {
254254+ // Check type condition if present
255255+ let current_type_name = schema.type_name(parent_type)
256256+ let should_execute = case type_condition_opt {
257257+ None -> True
258258+ Some(type_condition) -> type_condition == current_type_name
259259+ }
260260+261261+ case should_execute {
262262+ False -> Ok(#("__fragment_skip", value.Null, []))
263263+ True -> {
264264+ let inline_selection_set = parser.SelectionSet(inline_selections)
265265+ case
266266+ execute_selection_set(
267267+ inline_selection_set,
268268+ parent_type,
269269+ graphql_schema,
270270+ ctx,
271271+ fragments,
272272+ path,
273273+ )
274274+ {
275275+ Ok(#(value.Object(fields), errs)) ->
276276+ Ok(#("__fragment_fields", value.Object(fields), errs))
277277+ Ok(#(val, errs)) -> Ok(#("__fragment_fields", val, errs))
278278+ Error(err) -> Error(err)
279279+ }
280280+ }
281281+ }
282282+ }
283283+ parser.Field(name, _alias, _arguments, nested_selections) -> {
284284+ // Handle introspection meta-fields
285285+ case name {
286286+ "__typename" -> {
287287+ let type_name = schema.type_name(parent_type)
288288+ Ok(#("__typename", value.String(type_name), []))
289289+ }
290290+ "__schema" -> {
291291+ let schema_value = introspection.schema_introspection(graphql_schema)
292292+ // Handle nested selections on __schema
293293+ case nested_selections {
294294+ [] -> Ok(#("__schema", schema_value, []))
295295+ _ -> {
296296+ let selection_set = parser.SelectionSet(nested_selections)
297297+ // We don't have an actual type for __Schema, so we'll handle it specially
298298+ // For now, just return the schema value with nested execution
299299+ case
300300+ execute_introspection_selection_set(
301301+ selection_set,
302302+ schema_value,
303303+ graphql_schema,
304304+ ctx,
305305+ fragments,
306306+ ["__schema", ..path],
307307+ )
308308+ {
309309+ Ok(#(nested_data, nested_errors)) ->
310310+ Ok(#("__schema", nested_data, nested_errors))
311311+ Error(err) -> {
312312+ let error = GraphQLError(err, ["__schema", ..path])
313313+ Ok(#("__schema", value.Null, [error]))
314314+ }
315315+ }
316316+ }
317317+ }
318318+ }
319319+ _ -> {
320320+ // Get field from schema
321321+ case schema.get_field(parent_type, name) {
322322+ None -> {
323323+ let error = GraphQLError("Field '" <> name <> "' not found", path)
324324+ Ok(#(name, value.Null, [error]))
325325+ }
326326+ Some(field) -> {
327327+ // Get the field's type for nested selections
328328+ let field_type_def = schema.field_type(field)
329329+330330+ // Resolve the field
331331+ case schema.resolve_field(field, ctx) {
332332+ Error(err) -> {
333333+ let error = GraphQLError(err, [name, ..path])
334334+ Ok(#(name, value.Null, [error]))
335335+ }
336336+ Ok(field_value) -> {
337337+ // If there are nested selections, recurse
338338+ case nested_selections {
339339+ [] -> Ok(#(name, field_value, []))
340340+ _ -> {
341341+ // Need to resolve nested fields
342342+ case field_value {
343343+ value.Object(_) -> {
344344+ // Execute nested selections using the field's type, not parent type
345345+ let selection_set =
346346+ parser.SelectionSet(nested_selections)
347347+ case
348348+ execute_selection_set(
349349+ selection_set,
350350+ field_type_def,
351351+ graphql_schema,
352352+ ctx,
353353+ fragments,
354354+ [name, ..path],
355355+ )
356356+ {
357357+ Ok(#(nested_data, nested_errors)) ->
358358+ Ok(#(name, nested_data, nested_errors))
359359+ Error(err) -> {
360360+ let error = GraphQLError(err, [name, ..path])
361361+ Ok(#(name, value.Null, [error]))
362362+ }
363363+ }
364364+ }
365365+ value.List(items) -> {
366366+ // Handle list with nested selections
367367+ // Get the inner type from the LIST wrapper
368368+ let inner_type = case
369369+ schema.inner_type(field_type_def)
370370+ {
371371+ option.Some(t) -> t
372372+ option.None -> field_type_def
373373+ }
374374+375375+ // Execute nested selections on each item
376376+ let selection_set =
377377+ parser.SelectionSet(nested_selections)
378378+ let results =
379379+ list.map(items, fn(item) {
380380+ // Create context with this item's data
381381+ let item_ctx = schema.Context(option.Some(item))
382382+ execute_selection_set(
383383+ selection_set,
384384+ inner_type,
385385+ graphql_schema,
386386+ item_ctx,
387387+ fragments,
388388+ [name, ..path],
389389+ )
390390+ })
391391+392392+ // Collect results and errors
393393+ let processed_items =
394394+ results
395395+ |> list.filter_map(fn(r) {
396396+ case r {
397397+ Ok(#(val, _)) -> Ok(val)
398398+ Error(_) -> Error(Nil)
399399+ }
400400+ })
401401+402402+ let all_errors =
403403+ results
404404+ |> list.flat_map(fn(r) {
405405+ case r {
406406+ Ok(#(_, errs)) -> errs
407407+ Error(_) -> []
408408+ }
409409+ })
410410+411411+ Ok(#(name, value.List(processed_items), all_errors))
412412+ }
413413+ _ -> Ok(#(name, field_value, []))
414414+ }
415415+ }
416416+ }
417417+ }
418418+ }
419419+ }
420420+ }
421421+ }
422422+ }
423423+ }
424424+ }
425425+}
426426+427427+/// Execute a selection set on an introspection value (like __schema)
428428+/// This directly reads fields from the value.Object rather than using resolvers
429429+fn execute_introspection_selection_set(
430430+ selection_set: parser.SelectionSet,
431431+ value_obj: value.Value,
432432+ graphql_schema: schema.Schema,
433433+ ctx: schema.Context,
434434+ fragments: Dict(String, parser.Operation),
435435+ path: List(String),
436436+) -> Result(#(value.Value, List(GraphQLError)), String) {
437437+ case selection_set {
438438+ parser.SelectionSet(selections) -> {
439439+ case value_obj {
440440+ value.List(items) -> {
441441+ // For lists, execute the selection set on each item
442442+ let results =
443443+ list.map(items, fn(item) {
444444+ execute_introspection_selection_set(
445445+ selection_set,
446446+ item,
447447+ graphql_schema,
448448+ ctx,
449449+ fragments,
450450+ path,
451451+ )
452452+ })
453453+454454+ // Collect the data and errors
455455+ let data_items =
456456+ results
457457+ |> list.filter_map(fn(r) {
458458+ case r {
459459+ Ok(#(val, _)) -> Ok(val)
460460+ Error(_) -> Error(Nil)
461461+ }
462462+ })
463463+464464+ let all_errors =
465465+ results
466466+ |> list.flat_map(fn(r) {
467467+ case r {
468468+ Ok(#(_, errs)) -> errs
469469+ Error(_) -> []
470470+ }
471471+ })
472472+473473+ Ok(#(value.List(data_items), all_errors))
474474+ }
475475+ value.Null -> {
476476+ // If the value is null, just return null regardless of selections
477477+ // This handles cases like mutationType and subscriptionType which are null
478478+ Ok(#(value.Null, []))
479479+ }
480480+ value.Object(fields) -> {
481481+ // For each selection, find the corresponding field in the object
482482+ let results =
483483+ list.map(selections, fn(selection) {
484484+ case selection {
485485+ parser.FragmentSpread(name) -> {
486486+ // Look up the fragment definition
487487+ case dict.get(fragments, name) {
488488+ Error(_) -> Error(Nil)
489489+ // Fragment not found, skip it
490490+ Ok(parser.FragmentDefinition(
491491+ _fname,
492492+ _type_condition,
493493+ fragment_selection_set,
494494+ )) -> {
495495+ // For introspection, we don't check type conditions - just execute the fragment
496496+ case
497497+ execute_introspection_selection_set(
498498+ fragment_selection_set,
499499+ value_obj,
500500+ graphql_schema,
501501+ ctx,
502502+ fragments,
503503+ path,
504504+ )
505505+ {
506506+ Ok(#(value.Object(fragment_fields), errs)) ->
507507+ Ok(#(
508508+ "__fragment_fields",
509509+ value.Object(fragment_fields),
510510+ errs,
511511+ ))
512512+ Ok(#(val, errs)) ->
513513+ Ok(#("__fragment_fields", val, errs))
514514+ Error(_err) -> Error(Nil)
515515+ }
516516+ }
517517+ Ok(_) -> Error(Nil)
518518+ // Invalid fragment definition
519519+ }
520520+ }
521521+ parser.InlineFragment(_type_condition_opt, inline_selections) -> {
522522+ // For introspection, inline fragments always execute (no type checking needed)
523523+ // Execute the inline fragment's selections on this object
524524+ let inline_selection_set =
525525+ parser.SelectionSet(inline_selections)
526526+ case
527527+ execute_introspection_selection_set(
528528+ inline_selection_set,
529529+ value_obj,
530530+ graphql_schema,
531531+ ctx,
532532+ fragments,
533533+ path,
534534+ )
535535+ {
536536+ Ok(#(value.Object(fragment_fields), errs)) ->
537537+ // Return fragment fields to be merged
538538+ Ok(#(
539539+ "__fragment_fields",
540540+ value.Object(fragment_fields),
541541+ errs,
542542+ ))
543543+ Ok(#(val, errs)) -> Ok(#("__fragment_fields", val, errs))
544544+ Error(_err) -> Error(Nil)
545545+ }
546546+ }
547547+ parser.Field(name, _alias, _arguments, nested_selections) -> {
548548+ // Find the field in the object
549549+ case list.key_find(fields, name) {
550550+ Ok(field_value) -> {
551551+ // Handle nested selections
552552+ case nested_selections {
553553+ [] -> Ok(#(name, field_value, []))
554554+ _ -> {
555555+ let selection_set =
556556+ parser.SelectionSet(nested_selections)
557557+ case
558558+ execute_introspection_selection_set(
559559+ selection_set,
560560+ field_value,
561561+ graphql_schema,
562562+ ctx,
563563+ fragments,
564564+ [name, ..path],
565565+ )
566566+ {
567567+ Ok(#(nested_data, nested_errors)) ->
568568+ Ok(#(name, nested_data, nested_errors))
569569+ Error(err) -> {
570570+ let error = GraphQLError(err, [name, ..path])
571571+ Ok(#(name, value.Null, [error]))
572572+ }
573573+ }
574574+ }
575575+ }
576576+ }
577577+ Error(_) -> {
578578+ let error =
579579+ GraphQLError("Field '" <> name <> "' not found", path)
580580+ Ok(#(name, value.Null, [error]))
581581+ }
582582+ }
583583+ }
584584+ }
585585+ })
586586+587587+ // Collect all data and errors, merging fragment fields
588588+ let #(data, errors) =
589589+ results
590590+ |> list.fold(#([], []), fn(acc, r) {
591591+ let #(fields_acc, errors_acc) = acc
592592+ case r {
593593+ Ok(#("__fragment_fields", value.Object(fragment_fields), errs)) -> {
594594+ // Merge fragment fields into parent
595595+ #(
596596+ list.append(fields_acc, fragment_fields),
597597+ list.append(errors_acc, errs),
598598+ )
599599+ }
600600+ Ok(#(name, val, errs)) -> {
601601+ // Regular field
602602+ #(
603603+ list.append(fields_acc, [#(name, val)]),
604604+ list.append(errors_acc, errs),
605605+ )
606606+ }
607607+ Error(_) -> acc
608608+ }
609609+ })
610610+611611+ Ok(#(value.Object(data), errors))
612612+ }
613613+ _ ->
614614+ Error(
615615+ "Expected object, list, or null for introspection selection set",
616616+ )
617617+ }
618618+ }
619619+ }
620620+}
+199
graphql/src/graphql/introspection.gleam
···11+/// GraphQL Introspection
22+///
33+/// Implements the GraphQL introspection system per the GraphQL spec.
44+/// Provides __schema, __type, and __typename meta-fields.
55+import gleam/list
66+import gleam/option
77+import graphql/schema
88+import graphql/value
99+1010+/// Build introspection value for __schema
1111+pub fn schema_introspection(graphql_schema: schema.Schema) -> value.Value {
1212+ let query_type = schema.query_type(graphql_schema)
1313+1414+ // Build list of all types in the schema
1515+ let all_types = get_all_types(graphql_schema)
1616+1717+ value.Object([
1818+ #("queryType", type_ref(query_type)),
1919+ #("mutationType", value.Null),
2020+ #("subscriptionType", value.Null),
2121+ #("types", value.List(all_types)),
2222+ #("directives", value.List([])),
2323+ ])
2424+}
2525+2626+/// Get all types from the schema
2727+fn get_all_types(graphql_schema: schema.Schema) -> List(value.Value) {
2828+ let query_type = schema.query_type(graphql_schema)
2929+3030+ // Collect all types by traversing the schema
3131+ let mut_collected_types = collect_types_from_type(query_type, [])
3232+3333+ // Deduplicate by type name
3434+ let type_names = list.map(mut_collected_types, schema.type_name)
3535+ let unique_types =
3636+ list.zip(type_names, mut_collected_types)
3737+ |> list.unique
3838+ |> list.map(fn(pair) { pair.1 })
3939+4040+ // Add any built-in scalars that aren't already in the list
4141+ let all_built_ins = [
4242+ schema.string_type(),
4343+ schema.int_type(),
4444+ schema.float_type(),
4545+ schema.boolean_type(),
4646+ schema.id_type(),
4747+ ]
4848+4949+ let collected_names = list.map(unique_types, schema.type_name)
5050+ let missing_built_ins =
5151+ list.filter(all_built_ins, fn(built_in) {
5252+ let built_in_name = schema.type_name(built_in)
5353+ !list.contains(collected_names, built_in_name)
5454+ })
5555+5656+ let all_types = list.append(unique_types, missing_built_ins)
5757+5858+ // Convert all types to introspection values
5959+ list.map(all_types, type_introspection)
6060+}
6161+6262+/// Collect all types referenced in a type (recursively)
6363+fn collect_types_from_type(
6464+ t: schema.Type,
6565+ acc: List(schema.Type),
6666+) -> List(schema.Type) {
6767+ case
6868+ list.any(acc, fn(existing) {
6969+ schema.type_name(existing) == schema.type_name(t)
7070+ })
7171+ {
7272+ True -> acc
7373+ // Already collected this type
7474+ False -> {
7575+ let new_acc = [t, ..acc]
7676+7777+ // Recursively collect types from fields if this is an object type
7878+ case schema.is_object(t) {
7979+ True -> {
8080+ let fields = schema.get_fields(t)
8181+ list.fold(fields, new_acc, fn(acc2, field) {
8282+ let field_type = schema.field_type(field)
8383+ collect_types_from_type_deep(field_type, acc2)
8484+ })
8585+ }
8686+ False -> {
8787+ // Check if it's a wrapping type (List or NonNull)
8888+ case schema.inner_type(t) {
8989+ option.Some(inner) -> collect_types_from_type_deep(inner, new_acc)
9090+ option.None -> new_acc
9191+ }
9292+ }
9393+ }
9494+ }
9595+ }
9696+}
9797+9898+/// Helper to unwrap LIST and NON_NULL and collect the inner type
9999+fn collect_types_from_type_deep(
100100+ t: schema.Type,
101101+ acc: List(schema.Type),
102102+) -> List(schema.Type) {
103103+ // Check if this is a wrapping type (List or NonNull)
104104+ case schema.inner_type(t) {
105105+ option.Some(inner) -> collect_types_from_type_deep(inner, acc)
106106+ option.None -> collect_types_from_type(t, acc)
107107+ }
108108+}
109109+110110+/// Build full type introspection value
111111+fn type_introspection(t: schema.Type) -> value.Value {
112112+ let kind = schema.type_kind(t)
113113+ let type_name = schema.type_name(t)
114114+115115+ // Get inner type for LIST and NON_NULL
116116+ let of_type = case schema.inner_type(t) {
117117+ option.Some(inner) -> type_ref(inner)
118118+ option.None -> value.Null
119119+ }
120120+121121+ // Determine fields based on kind
122122+ let fields = case kind {
123123+ "OBJECT" -> value.List(get_fields_for_type(t))
124124+ _ -> value.Null
125125+ }
126126+127127+ // Handle wrapping types (LIST/NON_NULL) differently
128128+ let name = case kind {
129129+ "LIST" -> value.Null
130130+ "NON_NULL" -> value.Null
131131+ _ -> value.String(type_name)
132132+ }
133133+134134+ value.Object([
135135+ #("kind", value.String(kind)),
136136+ #("name", name),
137137+ #("description", value.Null),
138138+ #("fields", fields),
139139+ #("interfaces", value.List([])),
140140+ #("possibleTypes", value.Null),
141141+ #("enumValues", value.Null),
142142+ #("inputFields", value.Null),
143143+ #("ofType", of_type),
144144+ ])
145145+}
146146+147147+/// Get fields for a type (if it's an object type)
148148+fn get_fields_for_type(t: schema.Type) -> List(value.Value) {
149149+ let fields = schema.get_fields(t)
150150+151151+ list.map(fields, fn(field) {
152152+ let field_type_val = schema.field_type(field)
153153+ let args = schema.field_arguments(field)
154154+155155+ value.Object([
156156+ #("name", value.String(schema.field_name(field))),
157157+ #("description", value.String(schema.field_description(field))),
158158+ #("args", value.List(list.map(args, argument_introspection))),
159159+ #("type", type_ref(field_type_val)),
160160+ #("isDeprecated", value.Boolean(False)),
161161+ #("deprecationReason", value.Null),
162162+ ])
163163+ })
164164+}
165165+166166+/// Build introspection for an argument
167167+fn argument_introspection(arg: schema.Argument) -> value.Value {
168168+ value.Object([
169169+ #("name", value.String(schema.argument_name(arg))),
170170+ #("description", value.String(schema.argument_description(arg))),
171171+ #("type", type_ref(schema.argument_type(arg))),
172172+ #("defaultValue", value.Null),
173173+ ])
174174+}
175175+176176+/// Build a type reference (simplified version of type_introspection for field types)
177177+fn type_ref(t: schema.Type) -> value.Value {
178178+ let kind = schema.type_kind(t)
179179+ let type_name = schema.type_name(t)
180180+181181+ // Get inner type for LIST and NON_NULL
182182+ let of_type = case schema.inner_type(t) {
183183+ option.Some(inner) -> type_ref(inner)
184184+ option.None -> value.Null
185185+ }
186186+187187+ // Handle wrapping types (LIST/NON_NULL) differently
188188+ let name = case kind {
189189+ "LIST" -> value.Null
190190+ "NON_NULL" -> value.Null
191191+ _ -> value.String(type_name)
192192+ }
193193+194194+ value.Object([
195195+ #("kind", value.String(kind)),
196196+ #("name", name),
197197+ #("ofType", of_type),
198198+ ])
199199+}
···11+/// GraphQL Schema - Type System
22+///
33+/// Per GraphQL spec Section 3 - Type System
44+/// Defines the type system including scalars, objects, enums, etc.
55+import gleam/list
66+import gleam/option.{type Option, None}
77+import graphql/value
88+99+/// Resolver context - will contain request context, data loaders, etc.
1010+pub type Context {
1111+ Context(data: Option(value.Value))
1212+}
1313+1414+/// Field resolver function type
1515+pub type Resolver =
1616+ fn(Context) -> Result(value.Value, String)
1717+1818+/// GraphQL Type
1919+pub opaque type Type {
2020+ ScalarType(name: String)
2121+ ObjectType(name: String, description: String, fields: List(Field))
2222+ EnumType(name: String, description: String, values: List(EnumValue))
2323+ ListType(inner_type: Type)
2424+ NonNullType(inner_type: Type)
2525+}
2626+2727+/// GraphQL Field
2828+pub opaque type Field {
2929+ Field(
3030+ name: String,
3131+ field_type: Type,
3232+ description: String,
3333+ arguments: List(Argument),
3434+ resolver: Resolver,
3535+ )
3636+}
3737+3838+/// GraphQL Argument
3939+pub opaque type Argument {
4040+ Argument(
4141+ name: String,
4242+ arg_type: Type,
4343+ description: String,
4444+ default_value: Option(value.Value),
4545+ )
4646+}
4747+4848+/// GraphQL Enum Value
4949+pub opaque type EnumValue {
5050+ EnumValue(name: String, description: String)
5151+}
5252+5353+/// GraphQL Schema
5454+pub opaque type Schema {
5555+ Schema(query_type: Type, mutation_type: Option(Type))
5656+}
// Built-in scalar types

/// The built-in `String` scalar.
pub fn string_type() -> Type {
  ScalarType("String")
}

/// The built-in `Int` scalar.
pub fn int_type() -> Type {
  ScalarType("Int")
}

/// The built-in `Float` scalar.
pub fn float_type() -> Type {
  ScalarType("Float")
}

/// The built-in `Boolean` scalar.
pub fn boolean_type() -> Type {
  ScalarType("Boolean")
}

/// The built-in `ID` scalar.
pub fn id_type() -> Type {
  ScalarType("ID")
}
// Type constructors

/// Construct an object type from a name, description and its fields.
pub fn object_type(
  name: String,
  description: String,
  fields: List(Field),
) -> Type {
  ObjectType(name, description, fields)
}

/// Construct an enum type from a name, description and its values.
pub fn enum_type(
  name: String,
  description: String,
  values: List(EnumValue),
) -> Type {
  EnumType(name, description, values)
}

/// Wrap a type in a list.
pub fn list_type(inner_type: Type) -> Type {
  ListType(inner_type)
}

/// Mark a type as non-nullable.
pub fn non_null(inner_type: Type) -> Type {
  NonNullType(inner_type)
}
// Field constructors

/// Construct a field with no arguments.
///
/// Delegates to `field_with_args` with an empty argument list so the two
/// constructors cannot drift apart.
pub fn field(
  name: String,
  field_type: Type,
  description: String,
  resolver: Resolver,
) -> Field {
  field_with_args(name, field_type, description, [], resolver)
}

/// Construct a field that accepts arguments.
pub fn field_with_args(
  name: String,
  field_type: Type,
  description: String,
  arguments: List(Argument),
  resolver: Resolver,
) -> Field {
  Field(name, field_type, description, arguments, resolver)
}
// Argument constructor

/// Construct an argument with an optional default value.
pub fn argument(
  name: String,
  arg_type: Type,
  description: String,
  default_value: Option(value.Value),
) -> Argument {
  Argument(name, arg_type, description, default_value)
}

// Enum value constructor

/// Construct a named enum value.
pub fn enum_value(name: String, description: String) -> EnumValue {
  EnumValue(name, description)
}

// Schema constructor

/// Construct a schema from a root query type and an optional mutation
/// type.
pub fn schema(query_type: Type, mutation_type: Option(Type)) -> Schema {
  Schema(query_type, mutation_type)
}
// Accessors

/// Render a type's name using GraphQL SDL notation for wrappers:
/// `[Inner]` for lists and `Inner!` for non-null types.
pub fn type_name(t: Type) -> String {
  case t {
    ScalarType(name) -> name
    ObjectType(name, _, _) -> name
    EnumType(name, _, _) -> name
    ListType(inner) -> "[" <> type_name(inner) <> "]"
    NonNullType(inner) -> type_name(inner) <> "!"
  }
}

/// The name of a field.
pub fn field_name(f: Field) -> String {
  // Field has a single constructor, so the record accessor is safe.
  f.name
}

/// The root query type of a schema.
pub fn query_type(s: Schema) -> Type {
  s.query_type
}

/// Whether the type is a NonNull wrapper.
pub fn is_non_null(t: Type) -> Bool {
  case t {
    NonNullType(_) -> True
    _ -> False
  }
}

/// Whether the type is a List wrapper.
pub fn is_list(t: Type) -> Bool {
  case t {
    ListType(_) -> True
    _ -> False
  }
}
// Field resolution helpers

/// Invoke a field's resolver with the given context.
pub fn resolve_field(field: Field, ctx: Context) -> Result(value.Value, String) {
  let run = field.resolver
  run(ctx)
}

/// Look up a field by name on an ObjectType.
///
/// Returns None for non-object types or when no field matches.
pub fn get_field(t: Type, field_name: String) -> Option(Field) {
  case t {
    ObjectType(_, _, fields) ->
      fields
      |> list.find(fn(f) { f.name == field_name })
      |> option.from_result
    _ -> None
  }
}
/// The declared type of a field.
pub fn field_type(field: Field) -> Type {
  field.field_type
}

/// All fields of an ObjectType; empty for every other kind of type.
pub fn get_fields(t: Type) -> List(Field) {
  case t {
    ObjectType(_, _, fields) -> fields
    _ -> []
  }
}

/// The human-readable description of a field.
pub fn field_description(field: Field) -> String {
  field.description
}

/// The arguments accepted by a field.
pub fn field_arguments(field: Field) -> List(Argument) {
  field.arguments
}

/// The name of an argument.
pub fn argument_name(arg: Argument) -> String {
  arg.name
}

/// The declared type of an argument.
pub fn argument_type(arg: Argument) -> Type {
  arg.arg_type
}

/// The human-readable description of an argument.
pub fn argument_description(arg: Argument) -> String {
  arg.description
}
/// Whether the type is a scalar.
pub fn is_scalar(t: Type) -> Bool {
  case t {
    ScalarType(_) -> True
    _ -> False
  }
}

/// Whether the type is an object.
pub fn is_object(t: Type) -> Bool {
  case t {
    ObjectType(_, _, _) -> True
    _ -> False
  }
}

/// Whether the type is an enum.
pub fn is_enum(t: Type) -> Bool {
  case t {
    EnumType(_, _, _) -> True
    _ -> False
  }
}

/// The inner type of a wrapping type (List or NonNull), or None for
/// named types.
pub fn inner_type(t: Type) -> option.Option(Type) {
  case t {
    ListType(inner) | NonNullType(inner) -> option.Some(inner)
    _ -> option.None
  }
}

/// The introspection `__TypeKind` string for a type.
pub fn type_kind(t: Type) -> String {
  case t {
    ScalarType(_) -> "SCALAR"
    ObjectType(_, _, _) -> "OBJECT"
    EnumType(_, _, _) -> "ENUM"
    ListType(_) -> "LIST"
    NonNullType(_) -> "NON_NULL"
  }
}
+32
graphql/src/graphql/value.gleam
/// GraphQL Value types
///
/// Per GraphQL spec Section 2 - Language, values can be scalars, enums,
/// lists, or objects. This module defines the core Value type used
/// throughout the GraphQL implementation.
/// A GraphQL value usable in queries, responses, and variables.
pub type Value {
  /// Null / absence of a value.
  Null
  /// Integer value (32-bit signed integer per spec).
  Int(Int)
  /// Floating point value (IEEE 754 double precision per spec).
  Float(Float)
  /// UTF-8 string value.
  String(String)
  /// Boolean true or false.
  Boolean(Bool)
  /// Enum value represented as a string (e.g. "ACTIVE", "PENDING").
  Enum(String)
  /// Ordered list of values.
  List(List(Value))
  /// Key-value pairs. A list of tuples keeps insertion order and stays
  /// simple.
  Object(List(#(String, Value)))
}
+349
graphql/test/graphql/executor_test.gleam
···11+/// Tests for GraphQL Executor
22+///
33+/// Tests query execution combining parser + schema + resolvers
44+import gleam/list
55+import gleam/option.{None}
66+import gleeunit/should
77+import graphql/executor
88+import graphql/schema
99+import graphql/value
// Helper to create a simple test schema with two plain fields and one
// field that takes an argument.
fn test_schema() -> schema.Schema {
  let greet_arg =
    schema.argument("name", schema.string_type(), "Name to greet", None)
  let root =
    schema.object_type("Query", "Root query type", [
      schema.field("hello", schema.string_type(), "Hello field", fn(_ctx) {
        Ok(value.String("world"))
      }),
      schema.field("number", schema.int_type(), "Number field", fn(_ctx) {
        Ok(value.Int(42))
      }),
      schema.field_with_args(
        "greet",
        schema.string_type(),
        "Greet someone",
        [greet_arg],
        fn(_ctx) { Ok(value.String("Hello, Alice!")) },
      ),
    ])
  schema.schema(root, None)
}
// Schema with one nested object type, for exercising sub-selections.
fn nested_schema() -> schema.Schema {
  let user_type =
    schema.object_type("User", "A user", [
      schema.field("id", schema.id_type(), "User ID", fn(_ctx) {
        Ok(value.String("123"))
      }),
      schema.field("name", schema.string_type(), "User name", fn(_ctx) {
        Ok(value.String("Alice"))
      }),
    ])

  // The root resolver hands back a pre-built user object.
  let alice =
    value.Object([
      #("id", value.String("123")),
      #("name", value.String("Alice")),
    ])
  let root =
    schema.object_type("Query", "Root query type", [
      schema.field("user", user_type, "Get user", fn(_ctx) { Ok(alice) }),
    ])
  schema.schema(root, None)
}
pub fn execute_simple_query_test() {
  let s = test_schema()
  let result = executor.execute("{ hello }", s, schema.Context(None))

  // The response must hold exactly the one requested field, no errors.
  let matched = case should.be_ok(result) {
    executor.Response(
      data: value.Object([#("hello", value.String("world"))]),
      errors: [],
    ) -> True
    _ -> False
  }
  should.be_true(matched)
}
pub fn execute_multiple_fields_test() {
  let s = test_schema()
  executor.execute("{ hello number }", s, schema.Context(None))
  |> should.be_ok
}

pub fn execute_nested_query_test() {
  let s = nested_schema()
  executor.execute("{ user { id name } }", s, schema.Context(None))
  |> should.be_ok
}

pub fn execute_field_with_arguments_test() {
  let s = test_schema()
  executor.execute("{ greet(name: \"Alice\") }", s, schema.Context(None))
  |> should.be_ok
}
pub fn execute_invalid_query_returns_error_test() {
  let s = test_schema()
  let result = executor.execute("{ invalid }", s, schema.Context(None))

  // The field does not exist: either a field error in the response or a
  // top-level error is acceptable.
  let failed = case result {
    Ok(executor.Response(_, [_, ..])) -> True
    Error(_) -> True
    _ -> False
  }
  should.be_true(failed)
}

pub fn execute_parse_error_returns_error_test() {
  let s = test_schema()
  // Unterminated selection set must fail at parse time.
  executor.execute("{ invalid syntax", s, schema.Context(None))
  |> should.be_error
}
pub fn execute_typename_introspection_test() {
  let s = test_schema()
  let result = executor.execute("{ __typename }", s, schema.Context(None))

  // __typename on the root must resolve to the query type's name.
  let matched = case should.be_ok(result) {
    executor.Response(
      data: value.Object([#("__typename", value.String("Query"))]),
      errors: [],
    ) -> True
    _ -> False
  }
  should.be_true(matched)
}

pub fn execute_typename_with_regular_fields_test() {
  let s = test_schema()
  let result = executor.execute("{ __typename hello }", s, schema.Context(None))

  // __typename must coexist with ordinary fields, in selection order.
  let matched = case should.be_ok(result) {
    executor.Response(
      data: value.Object([
        #("__typename", value.String("Query")),
        #("hello", value.String("world")),
      ]),
      errors: [],
    ) -> True
    _ -> False
  }
  should.be_true(matched)
}
pub fn execute_schema_introspection_query_type_test() {
  let s = test_schema()
  let query = "{ __schema { queryType { name } } }"
  let result = executor.execute(query, s, schema.Context(None))

  // Compare against the fully constructed expected value.
  let expected =
    value.Object([
      #(
        "__schema",
        value.Object([
          #("queryType", value.Object([#("name", value.String("Query"))])),
        ]),
      ),
    ])
  let matched = case should.be_ok(result) {
    executor.Response(data: data, errors: []) -> data == expected
    _ -> False
  }
  should.be_true(matched)
}
// Fragment execution tests
pub fn execute_simple_fragment_spread_test() {
  let s = nested_schema()
  let query =
    "
    fragment UserFields on User {
      id
      name
    }

    { user { ...UserFields } }
    "

  let result = executor.execute(query, s, schema.Context(None))

  // The named fragment must be expanded into the user selection.
  let matched = case should.be_ok(result) {
    executor.Response(data: value.Object(fields), errors: []) ->
      case list.key_find(fields, "user") {
        Ok(value.Object(user_fields)) ->
          // Structural equality on Result makes the field checks concise.
          list.key_find(user_fields, "id") == Ok(value.String("123"))
          && list.key_find(user_fields, "name") == Ok(value.String("Alice"))
        _ -> False
      }
    _ -> False
  }
  should.be_true(matched)
}
// Test for list fields with nested selections
pub fn execute_list_with_nested_selections_test() {
  // Resolver factory: reads one named property from the parent object held
  // in the context, falling back to Null when absent.
  let prop = fn(key: String) {
    fn(ctx: schema.Context) {
      case ctx.data {
        option.Some(value.Object(fields)) ->
          case list.key_find(fields, key) {
            Ok(found) -> Ok(found)
            Error(_) -> Ok(value.Null)
          }
        _ -> Ok(value.Null)
      }
    }
  }

  let user_type =
    schema.object_type("User", "A user", [
      schema.field("id", schema.id_type(), "User ID", prop("id")),
      schema.field("name", schema.string_type(), "User name", prop("name")),
      schema.field("email", schema.string_type(), "User email", prop("email")),
    ])

  let root =
    schema.object_type("Query", "Root query type", [
      schema.field(
        "users",
        schema.list_type(user_type),
        "Get all users",
        fn(_ctx) {
          Ok(
            value.List([
              value.Object([
                #("id", value.String("1")),
                #("name", value.String("Alice")),
                #("email", value.String("alice@example.com")),
              ]),
              value.Object([
                #("id", value.String("2")),
                #("name", value.String("Bob")),
                #("email", value.String("bob@example.com")),
              ]),
            ]),
          )
        },
      ),
    ])

  let s = schema.schema(root, None)

  // Only id and name are requested; email must be filtered out of every
  // list element.
  let result =
    executor.execute("{ users { id name } }", s, schema.Context(None))

  let matched = case should.be_ok(result) {
    executor.Response(data: value.Object(fields), errors: []) ->
      case list.key_find(fields, "users") {
        Ok(value.List(users)) ->
          list.length(users) == 2
          && list.all(users, fn(user) {
            case user {
              value.Object(user_fields) -> {
                let present = fn(key) {
                  case list.key_find(user_fields, key) {
                    Ok(_) -> True
                    Error(_) -> False
                  }
                }
                // Exactly the two selected fields, and no email.
                list.length(user_fields) == 2
                && present("id")
                && present("name")
                && !present("email")
              }
              _ -> False
            }
          })
        _ -> False
      }
    _ -> False
  }
  should.be_true(matched)
}
+311
graphql/test/graphql/introspection_test.gleam
···11+/// Tests for GraphQL Introspection
22+///
33+/// Comprehensive tests for introspection queries
44+import gleam/list
55+import gleam/option.{None}
66+import gleeunit/should
77+import graphql/executor
88+import graphql/schema
99+import graphql/value
// Helper to create a simple test schema with two scalar fields.
fn test_schema() -> schema.Schema {
  let hello =
    schema.field("hello", schema.string_type(), "Hello field", fn(_ctx) {
      Ok(value.String("world"))
    })
  let number =
    schema.field("number", schema.int_type(), "Number field", fn(_ctx) {
      Ok(value.Int(42))
    })
  let root = schema.object_type("Query", "Root query type", [hello, number])
  schema.schema(root, None)
}
/// Test: Multiple scalar fields on __schema
/// All requested fields on __schema must appear in the response.
pub fn schema_multiple_fields_test() {
  let s = test_schema()
  let query =
    "{ __schema { queryType { name } mutationType { name } subscriptionType { name } } }"

  let result = executor.execute(query, s, schema.Context(None))

  let matched = case should.be_ok(result) {
    executor.Response(data: value.Object(fields), errors: []) ->
      case list.key_find(fields, "__schema") {
        Ok(value.Object(schema_fields)) -> {
          let query_type_present = case
            list.key_find(schema_fields, "queryType")
          {
            Ok(value.Object(_)) -> True
            _ -> False
          }
          // mutationType and subscriptionType are unset in this schema,
          // so both must come back as explicit nulls.
          query_type_present
          && list.key_find(schema_fields, "mutationType") == Ok(value.Null)
          && list.key_find(schema_fields, "subscriptionType")
          == Ok(value.Null)
        }
        _ -> False
      }
    _ -> False
  }
  should.be_true(matched)
}
/// Test: types field with other fields
/// The types array must be returned alongside other __schema fields.
pub fn schema_types_with_other_fields_test() {
  let s = test_schema()
  let query = "{ __schema { queryType { name } types { name } } }"

  let result = executor.execute(query, s, schema.Context(None))

  let matched = case should.be_ok(result) {
    executor.Response(data: value.Object(fields), errors: []) ->
      case list.key_find(fields, "__schema") {
        Ok(value.Object(schema_fields)) -> {
          let query_type_named = case
            list.key_find(schema_fields, "queryType")
          {
            Ok(value.Object(qt_fields)) ->
              list.key_find(qt_fields, "name") == Ok(value.String("Query"))
            _ -> False
          }
          let six_types = case list.key_find(schema_fields, "types") {
            // Query plus the five built-in scalars.
            Ok(value.List(types)) -> list.length(types) == 6
            _ -> False
          }
          query_type_named && six_types
        }
        _ -> False
      }
    _ -> False
  }
  should.be_true(matched)
}
/// Test: All __schema top-level fields
/// A query requesting every __schema field must return all of them.
pub fn schema_all_fields_test() {
  let s = test_schema()
  let query =
    "{ __schema { queryType { name } mutationType { name } subscriptionType { name } types { name } directives { name } } }"

  let result = executor.execute(query, s, schema.Context(None))

  let matched = case should.be_ok(result) {
    executor.Response(data: value.Object(fields), errors: []) ->
      case list.key_find(fields, "__schema") {
        // All five requested fields must be present - no more, no fewer.
        Ok(value.Object(schema_fields)) -> list.length(schema_fields) == 5
        _ -> False
      }
    _ -> False
  }
  should.be_true(matched)
}
/// Test: Field order doesn't matter
/// Swapping field order in the query must not change what is returned.
pub fn schema_field_order_test() {
  let s = test_schema()
  let query1 = "{ __schema { types { name } queryType { name } } }"
  let query2 = "{ __schema { queryType { name } types { name } } }"

  let result1 = executor.execute(query1, s, schema.Context(None))
  let result2 = executor.execute(query2, s, schema.Context(None))

  should.be_ok(result1)
  should.be_ok(result2)

  // Count the fields under __schema; -1 marks any unexpected shape.
  let schema_field_count = fn(result) {
    case result {
      Ok(executor.Response(data: value.Object(fields), errors: [])) ->
        case list.key_find(fields, "__schema") {
          Ok(value.Object(schema_fields)) -> list.length(schema_fields)
          _ -> -1
        }
      _ -> -1
    }
  }

  { schema_field_count(result1) == 2 && schema_field_count(result2) == 2 }
  |> should.be_true
}
/// Test: Nested introspection on types
/// Nested field selections must be honoured for every type entry.
pub fn schema_types_nested_fields_test() {
  let s = test_schema()
  let query = "{ __schema { types { name kind fields { name } } } }"

  let result = executor.execute(query, s, schema.Context(None))

  let matched = case should.be_ok(result) {
    executor.Response(data: value.Object(fields), errors: []) ->
      case list.key_find(fields, "__schema") {
        Ok(value.Object(schema_fields)) ->
          case list.key_find(schema_fields, "types") {
            Ok(value.List(types)) ->
              // Every type entry must expose name, kind and fields
              // (fields may be null for scalar types).
              list.all(types, fn(type_val) {
                case type_val {
                  value.Object(type_fields) -> {
                    let present = fn(key) {
                      case list.key_find(type_fields, key) {
                        Ok(_) -> True
                        Error(_) -> False
                      }
                    }
                    present("name") && present("kind") && present("fields")
                  }
                  _ -> False
                }
              })
            _ -> False
          }
        _ -> False
      }
    _ -> False
  }
  should.be_true(matched)
}
/// Test: Empty nested selections on null fields
/// Selecting into a null value must yield null, not an error.
pub fn schema_null_field_with_deep_nesting_test() {
  let s = test_schema()
  let query = "{ __schema { mutationType { name fields { name } } } }"

  let result = executor.execute(query, s, schema.Context(None))

  let matched = case should.be_ok(result) {
    executor.Response(data: value.Object(fields), errors: []) ->
      case list.key_find(fields, "__schema") {
        // mutationType is unset in this schema, so the deep selection
        // must collapse to a plain null.
        Ok(value.Object(schema_fields)) ->
          list.key_find(schema_fields, "mutationType") == Ok(value.Null)
        _ -> False
      }
    _ -> False
  }
  should.be_true(matched)
}
/// Test: Inline fragments in introspection
/// Inline fragments (as emitted by GraphiQL) must work on __Type.
pub fn schema_inline_fragment_test() {
  let s = test_schema()
  let query = "{ __schema { types { ... on __Type { kind name } } } }"

  let result = executor.execute(query, s, schema.Context(None))

  let matched = case should.be_ok(result) {
    executor.Response(data: value.Object(fields), errors: []) ->
      case list.key_find(fields, "__schema") {
        Ok(value.Object(schema_fields)) ->
          case list.key_find(schema_fields, "types") {
            Ok(value.List(types)) ->
              // Six types expected (Query + 5 scalars), each carrying the
              // fragment's kind and name as strings.
              list.length(types) == 6
              && list.all(types, fn(type_val) {
                case type_val {
                  value.Object(type_fields) -> {
                    let string_at = fn(key) {
                      case list.key_find(type_fields, key) {
                        Ok(value.String(_)) -> True
                        _ -> False
                      }
                    }
                    string_at("kind") && string_at("name")
                  }
                  _ -> False
                }
              })
            _ -> False
          }
        _ -> False
      }
    _ -> False
  }
  should.be_true(matched)
}
···11+# jetstream
[![Package Version](https://img.shields.io/hexpm/v/jetstream)](https://hex.pm/packages/jetstream)
[![Hex Docs](https://img.shields.io/badge/hex-docs-ffaff3)](https://hexdocs.pm/jetstream/)
55+66+```sh
77+gleam add jetstream@1
88+```
99+```gleam
1010+import jetstream
1111+1212+pub fn main() -> Nil {
1313+ // TODO: An example of the project in use
1414+}
1515+```
1616+1717+Further documentation can be found at <https://hexdocs.pm/jetstream>.
1818+1919+## Development
2020+2121+```sh
2222+gleam run # Run the project
2323+gleam test # Run the tests
2424+```
+23
jetstream/gleam.toml
···11+name = "jetstream"
22+version = "1.0.0"
33+44+# Fill out these fields if you intend to generate HTML documentation or publish
55+# your project to the Hex package manager.
66+#
77+# description = ""
88+# licences = ["Apache-2.0"]
99+# repository = { type = "github", user = "", repo = "" }
1010+# links = [{ title = "Website", href = "" }]
1111+#
1212+# For a full reference of all the available options, you can have a look at
1313+# https://gleam.run/writing-gleam/gleam-toml/.
1414+1515+[dependencies]
1616+gleam_stdlib = ">= 0.44.0 and < 2.0.0"
1717+gleam_erlang = ">= 1.0.0 and < 2.0.0"
1818+gleam_http = ">= 4.0.0 and < 5.0.0"
1919+gleam_json = ">= 3.0.2 and < 4.0.0"
2020+gun = ">= 2.2.0 and < 3.0.0"
2121+2222+[dev-dependencies]
2323+gleeunit = ">= 1.0.0 and < 2.0.0"
+20
jetstream/manifest.toml
···11+# This file was generated by Gleam
22+# You typically do not need to edit this file
33+44+packages = [
55+ { name = "cowlib", version = "2.16.0", build_tools = ["make", "rebar3"], requirements = [], otp_app = "cowlib", source = "hex", outer_checksum = "7F478D80D66B747344F0EA7708C187645CFCC08B11AA424632F78E25BF05DB51" },
66+ { name = "gleam_erlang", version = "1.3.0", build_tools = ["gleam"], requirements = ["gleam_stdlib"], otp_app = "gleam_erlang", source = "hex", outer_checksum = "1124AD3AA21143E5AF0FC5CF3D9529F6DB8CA03E43A55711B60B6B7B3874375C" },
77+ { name = "gleam_http", version = "4.3.0", build_tools = ["gleam"], requirements = ["gleam_stdlib"], otp_app = "gleam_http", source = "hex", outer_checksum = "82EA6A717C842456188C190AFB372665EA56CE13D8559BF3B1DD9E40F619EE0C" },
88+ { name = "gleam_json", version = "3.0.2", build_tools = ["gleam"], requirements = ["gleam_stdlib"], otp_app = "gleam_json", source = "hex", outer_checksum = "874FA3C3BB6E22DD2BB111966BD40B3759E9094E05257899A7C08F5DE77EC049" },
99+ { name = "gleam_stdlib", version = "0.65.0", build_tools = ["gleam"], requirements = [], otp_app = "gleam_stdlib", source = "hex", outer_checksum = "7C69C71D8C493AE11A5184828A77110EB05A7786EBF8B25B36A72F879C3EE107" },
1010+ { name = "gleeunit", version = "1.6.1", build_tools = ["gleam"], requirements = ["gleam_stdlib"], otp_app = "gleeunit", source = "hex", outer_checksum = "FDC68A8C492B1E9B429249062CD9BAC9B5538C6FBF584817205D0998C42E1DAC" },
1111+ { name = "gun", version = "2.2.0", build_tools = ["make", "rebar3"], requirements = ["cowlib"], otp_app = "gun", source = "hex", outer_checksum = "76022700C64287FEB4DF93A1795CFF6741B83FB37415C40C34C38D2A4645261A" },
1212+]
1313+1414+[requirements]
1515+gleam_erlang = { version = ">= 1.0.0 and < 2.0.0" }
1616+gleam_http = { version = ">= 4.0.0 and < 5.0.0" }
1717+gleam_json = { version = ">= 3.0.2 and < 4.0.0" }
1818+gleam_stdlib = { version = ">= 0.44.0 and < 2.0.0" }
1919+gleeunit = { version = ">= 1.0.0 and < 2.0.0" }
2020+gun = { version = ">= 2.2.0 and < 3.0.0" }
+233
jetstream/src/jetstream.gleam
···11+import gleam/dynamic.{type Dynamic}
22+import gleam/dynamic/decode
33+import gleam/erlang/process.{type Pid}
44+import gleam/io
55+import gleam/json
66+import gleam/list
77+import gleam/option.{type Option}
88+import gleam/string
/// Jetstream event types, one constructor per decoded event shape.
pub type JetstreamEvent {
  CommitEvent(did: String, time_us: Int, commit: CommitData)
  IdentityEvent(did: String, time_us: Int, identity: IdentityData)
  AccountEvent(did: String, time_us: Int, account: AccountData)
  /// Fallback carrying the raw JSON when no decoder matched.
  UnknownEvent(raw: String)
}

/// Payload of a commit event.
pub type CommitData {
  CommitData(
    rev: String,
    operation: String,
    collection: String,
    rkey: String,
    record: Option(Dynamic),
    cid: Option(String),
  )
}

/// Payload of an identity event.
pub type IdentityData {
  IdentityData(did: String, handle: String, seq: Int, time: String)
}

/// Payload of an account event.
pub type AccountData {
  AccountData(active: Bool, did: String, seq: Int, time: String)
}

/// Configuration for Jetstream consumer: the WebSocket endpoint plus
/// optional collection and DID filters (empty lists mean no filter).
pub type JetstreamConfig {
  JetstreamConfig(
    endpoint: String,
    wanted_collections: List(String),
    wanted_dids: List(String),
  )
}
/// Create a default configuration for the US East endpoint with no
/// collection or DID filters.
pub fn default_config() -> JetstreamConfig {
  JetstreamConfig(
    endpoint: "wss://jetstream2.us-east.bsky.network/subscribe",
    wanted_collections: [],
    wanted_dids: [],
  )
}
/// Build the WebSocket URL with query parameters.
///
/// Each wanted collection and DID becomes its own repeated query
/// parameter, emitted in the order they appear in the config. Returns the
/// bare endpoint when no filters are configured.
pub fn build_url(config: JetstreamConfig) -> String {
  let collection_params =
    list.map(config.wanted_collections, fn(col) { "wantedCollections=" <> col })
  let did_params =
    list.map(config.wanted_dids, fn(did) { "wantedDids=" <> did })

  // Bug fix: the previous prepend-then-reverse construction emitted each
  // group of parameters in reverse order; appending preserves config order.
  case list.append(collection_params, did_params) {
    [] -> config.endpoint
    params -> config.endpoint <> "?" <> string.join(params, "&")
  }
}
/// Connect to Jetstream WebSocket using Erlang gun library
/// (implemented in jetstream_ws_ffi.erl). Returns the connection Pid on
/// success. NOTE(review): incoming frames are presumably delivered as
/// Erlang messages to `handler_pid` and drained via receive_ws_message —
/// confirm against the FFI module.
@external(erlang, "jetstream_ws_ffi", "connect")
pub fn connect(url: String, handler_pid: Pid) -> Result(Pid, Dynamic)
/// Start consuming the Jetstream feed
///
/// Builds the subscription URL from `config`, connects with the current
/// process as the frame handler, and then blocks forever in
/// `receive_loop`, invoking `on_event` with each raw JSON message.
/// Returns only if the initial connection fails.
///
/// Fix: on connection failure the summary line previously went to stdout
/// while the error detail went to stderr; both now go to stderr.
pub fn start_consumer(
  config: JetstreamConfig,
  on_event: fn(String) -> Nil,
) -> Nil {
  let url = build_url(config)
  io.println("🔗 Jetstream URL: " <> url)
  let self = process.self()
  case connect(url, self) {
    Ok(_conn_pid) -> receive_loop(on_event)
    Error(err) -> {
      io.println_error("Failed to connect to Jetstream")
      io.println_error(string.inspect(err))
    }
  }
}
/// Receive loop for WebSocket messages
///
/// Loops forever: each frame handed back by the FFI is passed to
/// `on_event`; receive errors (including timeouts) are skipped and the
/// loop continues listening.
fn receive_loop(on_event: fn(String) -> Nil) -> Nil {
  case receive_ws_message() {
    Ok(message) -> on_event(message)
    // Timeout or error: nothing to deliver this round.
    Error(_) -> Nil
  }
  receive_loop(on_event)
}
/// Receive a WebSocket message from the message queue
/// (implemented in jetstream_ffi.erl). Error(Nil) covers both receive
/// timeouts and failures — the caller (receive_loop) treats it as
/// "nothing yet, try again".
@external(erlang, "jetstream_ffi", "receive_ws_message")
fn receive_ws_message() -> Result(String, Nil)
/// Parse a JSON event string into a JetstreamEvent
///
/// Tries the commit, identity, and account decoders in that order
/// (matching the original fallback chain) within a single JSON parse,
/// instead of re-parsing the text once per decoder. Anything that
/// matches none of the shapes — including invalid JSON — becomes
/// UnknownEvent carrying the raw string.
pub fn parse_event(json_string: String) -> JetstreamEvent {
  let event_decoder =
    decode.one_of(commit_event_decoder(), or: [
      identity_event_decoder(),
      account_event_decoder(),
    ])

  case json.parse(json_string, event_decoder) {
    Ok(event) -> event
    Error(_) -> UnknownEvent(json_string)
  }
}
/// Decoder for the top-level commit event envelope
/// ("did", "time_us", "commit").
fn commit_event_decoder() {
  use event_did <- decode.field("did", decode.string)
  use timestamp_us <- decode.field("time_us", decode.int)
  use commit_data <- decode.field("commit", commit_data_decoder())
  decode.success(CommitEvent(
    did: event_did,
    time_us: timestamp_us,
    commit: commit_data,
  ))
}
/// Decoder for commit data - handles both create/update (with record) and delete (without)
fn commit_data_decoder() {
  let with_record = commit_with_record_decoder()
  let without_record = commit_without_record_decoder()
  // Create/update payloads carry "record" and "cid"; deletes do not,
  // so try the richer shape first and fall back.
  decode.one_of(with_record, or: [without_record])
}
/// Decoder for commit with record (create/update operations)
///
/// Requires "record" and "cid" in addition to the common fields; both
/// are wrapped in Some on success.
fn commit_with_record_decoder() {
  use revision <- decode.field("rev", decode.string)
  use op <- decode.field("operation", decode.string)
  use coll <- decode.field("collection", decode.string)
  use record_key <- decode.field("rkey", decode.string)
  use record_body <- decode.field("record", decode.dynamic)
  use content_id <- decode.field("cid", decode.string)
  decode.success(CommitData(
    rev: revision,
    operation: op,
    collection: coll,
    rkey: record_key,
    record: option.Some(record_body),
    cid: option.Some(content_id),
  ))
}
/// Decoder for commit without record (delete operations)
///
/// Only the common fields are required; `record` and `cid` are None.
fn commit_without_record_decoder() {
  use revision <- decode.field("rev", decode.string)
  use op <- decode.field("operation", decode.string)
  use coll <- decode.field("collection", decode.string)
  use record_key <- decode.field("rkey", decode.string)
  decode.success(CommitData(
    rev: revision,
    operation: op,
    collection: coll,
    rkey: record_key,
    record: option.None,
    cid: option.None,
  ))
}
/// Decoder for the top-level identity event envelope
/// ("did", "time_us", "identity").
fn identity_event_decoder() {
  use event_did <- decode.field("did", decode.string)
  use timestamp_us <- decode.field("time_us", decode.int)
  use identity_data <- decode.field("identity", identity_data_decoder())
  decode.success(IdentityEvent(
    did: event_did,
    time_us: timestamp_us,
    identity: identity_data,
  ))
}
/// Decoder for the nested "identity" object
/// ("did", "handle", "seq", "time").
fn identity_data_decoder() {
  use identity_did <- decode.field("did", decode.string)
  use user_handle <- decode.field("handle", decode.string)
  use sequence <- decode.field("seq", decode.int)
  use timestamp <- decode.field("time", decode.string)
  decode.success(IdentityData(
    did: identity_did,
    handle: user_handle,
    seq: sequence,
    time: timestamp,
  ))
}
/// Decoder for the top-level account event envelope
/// ("did", "time_us", "account").
fn account_event_decoder() {
  use event_did <- decode.field("did", decode.string)
  use timestamp_us <- decode.field("time_us", decode.int)
  use account_data <- decode.field("account", account_data_decoder())
  decode.success(AccountEvent(
    did: event_did,
    time_us: timestamp_us,
    account: account_data,
  ))
}
/// Decoder for the nested "account" object
/// ("active", "did", "seq", "time").
fn account_data_decoder() {
  use is_active <- decode.field("active", decode.bool)
  use account_did <- decode.field("did", decode.string)
  use sequence <- decode.field("seq", decode.int)
  use timestamp <- decode.field("time", decode.string)
  decode.success(AccountData(
    active: is_active,
    did: account_did,
    seq: sequence,
    time: timestamp,
  ))
}
···11+# Lexicon - AT Protocol Schema Validation for Gleam
22+33+A Gleam library for validating AT Protocol Lexicon schemas and data records, powered by the Rust [`slices-lexicon`](https://crates.io/crates/slices-lexicon) crate via Native Implemented Functions (NIFs).
44+55+## Features
66+77+- Validate AT Protocol Lexicon schema documents
88+- Validate data records against their schemas
99+- Check NSID (Namespaced Identifier) validity
1010+- High-performance validation using native Rust code
1111+- Type-safe Gleam API
1212+1313+## Prerequisites
1414+1515+- Rust toolchain (install via [rustup](https://rustup.rs/))
1616+- Gleam compiler
1717+- Make (for build automation)
1818+1919+## Building
2020+2121+The library requires building the Rust NIF before use:
2222+2323+```bash
2424+cd lexicon
2525+make build
2626+```
2727+2828+This will:
2929+1. Compile the Rust NIF library
3030+2. Copy it to the `priv/` directory
3131+3. Make it available for Gleam to load
3232+3333+### Platform-Specific Notes
3434+3535+- **macOS**: The `.dylib` file is automatically renamed to `.so` for BEAM compatibility
3636+- **Linux**: Uses `.so` extension directly
3737+- **Windows**: Uses `.dll` extension
3838+3939+## Usage
4040+4141+Add the lexicon library to your `gleam.toml`:
4242+4343+```toml
4444+[dependencies]
4545+lexicon = { path = "../lexicon" }
4646+```
4747+4848+### Example: Validating Lexicon Schemas
4949+5050+```gleam
5151+import lexicon
5252+import gleam/io
5353+5454+pub fn main() {
5555+ let schema_json = "{\"lexicon\": 1, \"id\": \"com.example.post\", ...}"
5656+5757+ case lexicon.validate_schemas([schema_json]) {
5858+ Ok(Nil) -> io.println("Schema is valid!")
5959+ Error(err) -> io.println("Validation failed: " <> lexicon.describe_error(err))
6060+ }
6161+}
6262+```
6363+6464+### Example: Validating Records
6565+6666+```gleam
6767+import lexicon
6868+6969+pub fn validate_post(record_json: String, schema_json: String) {
7070+ case lexicon.validate_record(record_json, schema_json) {
7171+ Ok(Nil) -> {
7272+ // Record is valid, safe to store in database
7373+ store_record(record_json)
7474+ }
7575+ Error(err) -> {
7676+ // Handle validation error
7777+ log_error(lexicon.describe_error(err))
7878+ }
7979+ }
8080+}
8181+```
8282+8383+### Example: Checking NSIDs
8484+8585+```gleam
8686+import lexicon
8787+import gleam/io
8888+8989+pub fn check_collection_name(collection: String) {
9090+ case lexicon.is_valid_nsid(collection) {
9191+ True -> io.println("Valid NSID: " <> collection)
9292+ False -> io.println("Invalid NSID: " <> collection)
9393+ }
9494+}
9595+9696+// Valid NSIDs
9797+check_collection_name("com.atproto.repo.createRecord") // Valid
9898+check_collection_name("app.bsky.feed.post") // Valid
9999+100100+// Invalid NSIDs
101101+check_collection_name("invalid nsid") // Invalid
102102+check_collection_name("UPPERCASE.NOT.ALLOWED") // Invalid
103103+```
104104+105105+## API Reference
106106+107107+### `validate_schemas(json_strings: List(String)) -> Result(Nil, ValidationError)`
108108+109109+Validates one or more lexicon schema documents. Returns `Ok(Nil)` if all schemas are valid.
110110+111111+**Parameters:**
112112+- `json_strings`: List of JSON strings representing lexicon schemas
113113+114114+**Returns:**
115115+- `Ok(Nil)`: All schemas are valid
116116+- `Error(ValidationError)`: One or more schemas failed validation
117117+118118+### `validate_record(record_json: String, schema_json: String) -> Result(Nil, ValidationError)`
119119+120120+Validates a data record against its lexicon schema.
121121+122122+**Parameters:**
123123+- `record_json`: JSON string of the record to validate
124124+- `schema_json`: JSON string of the lexicon schema
125125+126126+**Returns:**
127127+- `Ok(Nil)`: Record is valid according to the schema
128128+- `Error(ValidationError)`: Record validation failed
129129+130130+### `is_valid_nsid(nsid: String) -> Bool`
131131+132132+Checks if a string is a valid NSID (Namespaced Identifier).
133133+134134+**Parameters:**
135135+- `nsid`: String to check
136136+137137+**Returns:**
138138+- `True`: String is a valid NSID
139139+- `False`: String is not a valid NSID
140140+141141+### `describe_error(error: ValidationError) -> String`
142142+143143+Converts a `ValidationError` to a human-readable string.
144144+145145+## Testing
146146+147147+Run the test suite:
148148+149149+```bash
150150+make test
151151+```
152152+153153+## Development
154154+155155+### Project Structure
156156+157157+```
158158+lexicon/
159159+├── gleam.toml # Gleam package configuration
160160+├── Makefile # Build automation
161161+├── README.md # This file
162162+├── src/
163163+│ ├── lexicon.gleam # Main Gleam API
164164+│ └── lexicon_nif.erl # Erlang NIF loader
165165+├── native/
166166+│ └── lexicon_nif/ # Rust NIF implementation
167167+│ ├── Cargo.toml
168168+│ └── src/
169169+│ └── lib.rs
170170+└── priv/ # Compiled NIF library (created by build)
171171+```
172172+173173+### Cleaning
174174+175175+To remove build artifacts:
176176+177177+```bash
178178+make clean
179179+```
180180+181181+## Important Notes
182182+183183+### NIF Safety
184184+185185+Native Implemented Functions (NIFs) run in the same OS process as the BEAM VM. If a NIF crashes, it can bring down the entire runtime rather than just an isolated process. This library includes error handling to minimize this risk, but you should be aware of this limitation.
186186+187187+### Performance
188188+189189+Because validation runs in native Rust code, it's significantly faster than a pure Erlang/Gleam implementation, making it suitable for validating large numbers of schemas or records.
190190+191191+## License
192192+193193+This library uses the MIT-licensed `slices-lexicon` Rust crate.
194194+195195+## Resources
196196+197197+- [AT Protocol Specification](https://atproto.com/)
198198+- [Lexicon Schema Language](https://atproto.com/specs/lexicon)
199199+- [slices-lexicon Rust Crate](https://crates.io/crates/slices-lexicon)
200200+- [Rustler Documentation](https://github.com/rusterlium/rustler)
+9
lexicon/gleam.toml
···11+name = "lexicon"
22+version = "0.1.0"
33+44+[dependencies]
55+gleam_stdlib = ">= 0.60.0 and < 1.0.0"
66+gleam_json = ">= 3.0.2 and < 4.0.0"
77+88+[dev-dependencies]
99+gleeunit = ">= 1.0.0 and < 2.0.0"
+13
lexicon/manifest.toml
···11+# This file was generated by Gleam
22+# You typically do not need to edit this file
33+44+packages = [
55+ { name = "gleam_json", version = "3.0.2", build_tools = ["gleam"], requirements = ["gleam_stdlib"], otp_app = "gleam_json", source = "hex", outer_checksum = "874FA3C3BB6E22DD2BB111966BD40B3759E9094E05257899A7C08F5DE77EC049" },
66+ { name = "gleam_stdlib", version = "0.65.0", build_tools = ["gleam"], requirements = [], otp_app = "gleam_stdlib", source = "hex", outer_checksum = "7C69C71D8C493AE11A5184828A77110EB05A7786EBF8B25B36A72F879C3EE107" },
77+ { name = "gleeunit", version = "1.6.1", build_tools = ["gleam"], requirements = ["gleam_stdlib"], otp_app = "gleeunit", source = "hex", outer_checksum = "FDC68A8C492B1E9B429249062CD9BAC9B5538C6FBF584817205D0998C42E1DAC" },
88+]
99+1010+[requirements]
1111+gleam_json = { version = ">= 3.0.2 and < 4.0.0" }
1212+gleam_stdlib = { version = ">= 0.60.0 and < 1.0.0" }
1313+gleeunit = { version = ">= 1.0.0 and < 2.0.0" }
···11+# Lexicon GraphQL
22+33+Automatic GraphQL schema generation from AT Protocol Lexicon definitions. This package bridges AT Protocol's Lexicon schema system with GraphQL, enabling automatic GraphQL API generation for ATProto records.
44+55+## Features
66+77+### Automatic Schema Generation
88+- **Lexicon Parsing**: Parses AT Protocol lexicon JSON files
99+- **Type Mapping**: Automatically maps Lexicon types to GraphQL types
1010+- **Database Integration**: Generates GraphQL schemas that query database records
1111+- **Field Resolvers**: Auto-generated resolvers for lexicon properties
1212+1313+### Supported Lexicon Types
1414+- `string` → GraphQL String
1515+- `integer` → GraphQL Int
1616+- `boolean` → GraphQL Boolean
1717+- `datetime` → GraphQL String (ISO 8601 format)
1818+- Objects and nested properties
1919+- Arrays/lists
2020+2121+### Database Schema Builder
2222+Generates GraphQL schemas for database-stored ATProto records with:
2323+- Automatic field extraction from database records
2424+- Support for nested lexicon properties
2525+- Proper JSON parsing and field access
2626+- Metadata fields (uri, cid, did, collection, indexedAt)
2727+2828+## Architecture
2929+3030+The package consists of several modules:
3131+3232+- `lexicon_graphql/lexicon_parser.gleam` - Parses lexicon JSON files
3333+- `lexicon_graphql/type_mapper.gleam` - Maps lexicon types to GraphQL types
3434+- `lexicon_graphql/schema_builder.gleam` - Builds GraphQL schemas from lexicons
3535+- `lexicon_graphql/db_schema_builder.gleam` - Database-specific schema generation
3636+- `lexicon_graphql/ref_resolver.gleam` - Resolves lexicon references
3737+- `lexicon_graphql/nsid.gleam` - NSID (Namespaced Identifier) utilities
3838+3939+## Usage
4040+4141+### Creating a Schema from a Lexicon
4242+4343+```gleam
4444+import lexicon_graphql
4545+import lexicon_graphql/db_schema_builder
4646+import graphql/schema
4747+4848+// Parse a lexicon file
4949+let lexicon_json = "{ \"lexicon\": 1, \"id\": \"xyz.statusphere.status\", ... }"
5050+let assert Ok(lexicon) = lexicon_graphql.parse_lexicon(lexicon_json)
5151+5252+// Generate GraphQL schema for database queries
5353+let collection_name = "xyz.statusphere.status"
5454+let graphql_type = db_schema_builder.build_db_record_type(
5555+ collection_name,
5656+ lexicon,
5757+ get_records_fn
5858+)
5959+```
6060+6161+### Example: Status Record Schema
6262+6363+For a lexicon like:
6464+```json
6565+{
6666+ "lexicon": 1,
6767+ "id": "xyz.statusphere.status",
6868+ "defs": {
6969+ "main": {
7070+ "type": "record",
7171+ "record": {
7272+ "type": "object",
7373+ "properties": {
7474+ "status": { "type": "string" },
7575+ "createdAt": { "type": "string", "format": "datetime" }
7676+ }
7777+ }
7878+ }
7979+ }
8080+}
8181+```
8282+8383+The package automatically generates a GraphQL type with:
8484+```graphql
8585+type XyzStatusphereStatus {
8686+ uri: String!
8787+ cid: String!
8888+ did: String!
8989+ collection: String!
9090+ indexedAt: String!
9191+ status: String
9292+ createdAt: String
9393+}
9494+```
9595+9696+## Database Integration
9797+9898+### Record Structure
9999+100100+Records in the database have the following structure:
101101+- `uri`: AT URI of the record
102102+- `cid`: Content identifier
103103+- `did`: DID of the record owner
104104+- `collection`: Lexicon collection name
105105+- `json`: **JSON string** containing the record value
106106+- `indexed_at`: When the record was indexed
107107+108108+### JSON Storage Format
109109+110110+**IMPORTANT**: The `json` field MUST be stored as a proper JSON string, not Gleam/Erlang term syntax.
111111+112112+CORRECT: `{"$type":"xyz.statusphere.status","status":"..","createdAt":"2025-10-28T20:00:00Z"}`
113113+114114+INCORRECT: `dict.from_list([#("status", ".."), #("createdAt", "2025-10-28T20:00:00Z")])`
115115+116116+### Data Conversion
117117+118118+When storing records from Jetstream or backfill operations, always use proper JSON encoding:
119119+120120+```gleam
121121+import gleam/dynamic.{type Dynamic}
122122+123123+// Convert Dynamic (Erlang term) to JSON string
124124+fn dynamic_to_json(value: Dynamic) -> String {
125125+ let iolist = json_encode(value)
126126+ iolist_to_string(iolist)
127127+}
128128+129129+@external(erlang, "json", "encode")
130130+fn json_encode(value: Dynamic) -> Dynamic
131131+```
132132+133133+**Do NOT use** `string.inspect(value)` as it produces Gleam syntax, not JSON.
134134+135135+## Field Resolution
136136+137137+The `db_schema_builder` module provides helper functions for extracting fields from context:
138138+139139+```gleam
140140+// Get a top-level field from context
141141+get_field_from_context(ctx, "fieldName")
142142+143143+// Get a nested field from context
144144+get_nested_field_from_context(ctx, "parent", "child")
145145+```
146146+147147+These functions handle:
148148+- Safe field access with Result types
149149+- Null handling
150150+- Type checking
151151+- Nested object traversal
152152+153153+## Testing
154154+155155+The package uses the `graphql` package's test suite to verify schema generation and execution.
156156+157157+## Dependencies
158158+159159+- `gleam_stdlib` >= 0.44.0
160160+- `gleam_json` >= 3.0.0
161161+- `graphql` (local package)
162162+163163+## Integration Example
164164+165165+```gleam
166166+import lexicon_graphql/db_schema_builder
167167+import database
168168+import graphql/schema
169169+import graphql/executor
170170+171171+// Load lexicon
172172+let lexicon_json = load_lexicon("priv/lexicons/xyz/statusphere/status.json")
173173+let assert Ok(lexicon) = lexicon_graphql.parse_lexicon(lexicon_json)
174174+175175+// Define record fetcher
176176+fn get_records() {
177177+ database.get_records_by_collection(db, "xyz.statusphere.status")
178178+ |> result.map(fn(records) {
179179+ list.map(records, record_to_graphql_value)
180180+ })
181181+}
182182+183183+// Build GraphQL type
184184+let status_type = db_schema_builder.build_db_record_type(
185185+ "xyz.statusphere.status",
186186+ lexicon,
187187+ get_records
188188+)
189189+190190+// Create query type
191191+let query_type = schema.object_type("Query", "Root query", [
192192+ schema.field(
193193+ "statuses",
194194+ schema.list_type(status_type),
195195+ "Get all statuses",
196196+ fn(_) { get_records() }
197197+ )
198198+])
199199+200200+// Create and use schema
201201+let graphql_schema = schema.new(query_type)
202202+executor.execute("{ statuses { status } }", graphql_schema, schema.Context(None))
203203+```
204204+205205+## NSID Support
206206+207207+The package includes utilities for working with NSIDs (Namespaced Identifiers):
208208+209209+```gleam
210210+import lexicon_graphql/nsid
211211+212212+// Convert NSID to GraphQL type name
213213+nsid.to_type_name("xyz.statusphere.status")
214214+// → "XyzStatusphereStatus"
215215+216216+// Convert NSID to field name
217217+nsid.to_field_name("xyz.statusphere.status")
218218+// → "xyzStatusphereStatus"
219219+```
220220+221221+## Development
222222+223223+Run tests:
224224+```sh
225225+cd lexicon_graphql
226226+gleam test
227227+```
228228+229229+Build:
230230+```sh
231231+gleam build
232232+```
233233+234234+## Future Enhancements
235235+236236+- Support for lexicon references ($ref)
237237+- Union types
238238+- Custom validation rules
239239+- Mutation support for creating/updating records
240240+- Subscription support for real-time updates
+21
lexicon_graphql/gleam.toml
···11+name = "lexicon_graphql"
22+version = "1.0.0"
33+44+# Fill out these fields if you intend to generate HTML documentation or publish
55+# your project to the Hex package manager.
66+#
77+# description = ""
88+# licences = ["Apache-2.0"]
99+# repository = { type = "github", user = "", repo = "" }
1010+# links = [{ title = "Website", href = "" }]
1111+#
1212+# For a full reference of all the available options, you can have a look at
1313+# https://gleam.run/writing-gleam/gleam-toml/.
1414+1515+[dependencies]
1616+gleam_stdlib = ">= 0.44.0 and < 2.0.0"
1717+gleam_json = ">= 3.0.0 and < 4.0.0"
1818+graphql = {path = "../graphql"}
1919+2020+[dev-dependencies]
2121+gleeunit = ">= 1.0.0 and < 2.0.0"
+15
lexicon_graphql/manifest.toml
···11+# This file was generated by Gleam
22+# You typically do not need to edit this file
33+44+packages = [
55+ { name = "gleam_json", version = "3.0.2", build_tools = ["gleam"], requirements = ["gleam_stdlib"], otp_app = "gleam_json", source = "hex", outer_checksum = "874FA3C3BB6E22DD2BB111966BD40B3759E9094E05257899A7C08F5DE77EC049" },
66+ { name = "gleam_stdlib", version = "0.65.0", build_tools = ["gleam"], requirements = [], otp_app = "gleam_stdlib", source = "hex", outer_checksum = "7C69C71D8C493AE11A5184828A77110EB05A7786EBF8B25B36A72F879C3EE107" },
77+ { name = "gleeunit", version = "1.7.0", build_tools = ["gleam"], requirements = ["gleam_stdlib"], otp_app = "gleeunit", source = "hex", outer_checksum = "CD701726CBCE5588B375D157B4391CFD0F2F134CD12D9B6998A395484DE05C58" },
88+ { name = "graphql", version = "1.0.0", build_tools = ["gleam"], requirements = ["gleam_stdlib"], source = "local", path = "../graphql" },
99+]
1010+1111+[requirements]
1212+gleam_json = { version = ">= 3.0.0 and < 4.0.0" }
1313+gleam_stdlib = { version = ">= 0.44.0 and < 2.0.0" }
1414+gleeunit = { version = ">= 1.0.0 and < 2.0.0" }
1515+graphql = { path = "../graphql" }
···11+/// Lexicon JSON Parser
22+///
33+/// Parses AT Protocol lexicon JSON documents into structured Lexicon types
44+/// that can be used with the schema builder.
55+import gleam/dict
66+import gleam/dynamic/decode
77+import gleam/json
88+import gleam/list
99+import gleam/result
1010+import lexicon_graphql/schema_builder
/// Parse a lexicon JSON string into a Lexicon type
///
/// Decodes the top-level "id" and "defs" fields; any JSON or decoding
/// failure is collapsed into a single string error.
pub fn parse_lexicon(json_str: String) -> Result(schema_builder.Lexicon, String) {
  let lexicon_decoder = {
    use id <- decode.field("id", decode.string)
    use defs <- decode.field("defs", decode_defs())
    decode.success(schema_builder.Lexicon(id: id, defs: defs))
  }

  case json.parse(json_str, lexicon_decoder) {
    Ok(lexicon) -> Ok(lexicon)
    Error(_) -> Error("Failed to parse lexicon JSON")
  }
}
/// Create a decoder for the defs object (only the "main" definition
/// is read).
fn decode_defs() -> decode.Decoder(schema_builder.Defs) {
  use main_def <- decode.field("main", decode_record_def())
  decode.success(schema_builder.Defs(main: main_def))
}
/// Create a decoder for a record definition: its "type" tag plus the
/// properties extracted from its "record" object.
fn decode_record_def() -> decode.Decoder(schema_builder.RecordDef) {
  use kind <- decode.field("type", decode.string)
  use props <- decode.field("record", decode_record_object())
  decode.success(schema_builder.RecordDef(type_: kind, properties: props))
}
/// Create a decoder for the record object which contains properties
///
/// Reads the "properties" dict (values kept dynamic) and the optional
/// "required" list, then turns each entry into a named
/// schema_builder.Property flagged as required when its name appears in
/// that list.
fn decode_record_object() -> decode.Decoder(
  List(#(String, schema_builder.Property)),
) {
  use raw_properties <- decode.field(
    "properties",
    decode.dict(decode.string, decode.dynamic),
  )
  use required <- decode.optional_field(
    "required",
    [],
    decode.list(decode.string),
  )

  let to_property = fn(entry: #(String, decode.Dynamic)) {
    let #(name, raw) = entry
    // A missing or malformed "type" falls back to "string".
    let kind = result.unwrap(decode_property_type(raw), "string")
    #(name, schema_builder.Property(kind, list.contains(required, name)))
  }

  decode.success(
    raw_properties
    |> dict.to_list
    |> list.map(to_property),
  )
}
7373+7474+/// Decode a property's type field
7575+fn decode_property_type(
7676+ dyn: decode.Dynamic,
7777+) -> Result(String, List(decode.DecodeError)) {
7878+ let type_decoder = {
7979+ use type_ <- decode.field("type", decode.string)
8080+ decode.success(type_)
8181+ }
8282+ decode.run(dyn, type_decoder)
8383+}
+65
lexicon_graphql/src/lexicon_graphql/nsid.gleam
···11+/// NSID (Namespaced Identifier) utilities
22+///
33+/// NSIDs are used throughout AT Protocol to identify lexicons, collections,
44+/// and other namespaced resources. They follow the format: "domain.name.thing"
55+///
66+/// This module provides utilities for converting NSIDs to GraphQL-friendly names.
77+import gleam/list
88+import gleam/result
99+import gleam/string
/// Converts an NSID to a GraphQL type name (PascalCase).
///
/// ## Examples
///
/// ```gleam
/// to_type_name("xyz.statusphere.status") // "XyzStatusphereStatus"
/// to_type_name("app.bsky.feed.post") // "AppBskyFeedPost"
/// ```
pub fn to_type_name(nsid: String) -> String {
  string.split(nsid, ".")
  |> list.map(capitalize_first)
  |> string.concat
}
/// Converts an NSID to a GraphQL field name (camelCase): the first
/// segment keeps its case, every following segment is capitalized.
///
/// ## Examples
///
/// ```gleam
/// to_field_name("xyz.statusphere.status") // "xyzStatusphereStatus"
/// to_field_name("app.bsky.feed.post") // "appBskyFeedPost"
/// ```
pub fn to_field_name(nsid: String) -> String {
  case string.split(nsid, ".") {
    [head, ..tail] ->
      string.concat([head, ..list.map(tail, capitalize_first)])
    // Unreachable in practice, but keeps the match total.
    [] -> nsid
  }
}
/// Extracts the collection name from an NSID (last segment); returns ""
/// for an empty segment list.
///
/// ## Examples
///
/// ```gleam
/// to_collection_name("xyz.statusphere.status") // "status"
/// to_collection_name("app.bsky.feed.post") // "post"
/// ```
pub fn to_collection_name(nsid: String) -> String {
  case list.last(string.split(nsid, ".")) {
    Ok(last_segment) -> last_segment
    Error(Nil) -> ""
  }
}
/// Capitalizes the first letter of a string; empty strings are returned
/// unchanged.
fn capitalize_first(s: String) -> String {
  case string.pop_grapheme(s) {
    Error(_) -> s
    Ok(#(head, tail)) -> string.uppercase(head) <> tail
  }
}
···11+/// Lexicon Reference Resolver
22+///
33+/// Resolves ref types in lexicon definitions to their actual types.
44+/// Handles both local references (within same lexicon) and external references.
55+///
66+/// Reference URI format:
77+/// - "nsid" - references the main definition of that NSID
88+/// - "nsid#fragment" - references a specific definition within that NSID
99+import gleam/list
1010+import gleam/string
1111+import lexicon_graphql/schema_builder
/// Parse a reference URI into NSID and definition name
///
/// A bare NSID references its "main" definition; "nsid#fragment"
/// references a named definition inside that NSID.
///
/// ## Examples
///
/// ```gleam
/// parse_ref_uri("xyz.statusphere.profile")
/// // #("xyz.statusphere.profile", "main")
///
/// parse_ref_uri("xyz.statusphere.post#embed")
/// // #("xyz.statusphere.post", "embed")
/// ```
pub fn parse_ref_uri(ref_uri: String) -> #(String, String) {
  case string.split(ref_uri, "#") {
    [nsid, fragment] -> #(nsid, fragment)
    [nsid] -> #(nsid, "main")
    // Zero or several "#" separators: treat the whole URI as the NSID.
    _ -> #(ref_uri, "main")
  }
}
/// Resolves a reference URI to the actual lexicon definition
///
/// Returns the NSID of the referenced type when a lexicon with that
/// NSID exists in `lexicons`, otherwise a "Reference not found" error.
pub fn resolve_ref(
  ref_uri: String,
  lexicons: List(schema_builder.Lexicon),
) -> Result(String, String) {
  // Only the NSID part drives the lookup; the fragment names a
  // definition inside the lexicon and is not checked here.
  let #(nsid, _fragment) = parse_ref_uri(ref_uri)

  case find_lexicon(nsid, lexicons) {
    Error(Nil) -> Error("Reference not found: " <> ref_uri)
    Ok(_lexicon) -> Ok(nsid)
  }
}
/// Find a lexicon by its NSID (first one whose `id` matches).
fn find_lexicon(
  nsid: String,
  lexicons: List(schema_builder.Lexicon),
) -> Result(schema_builder.Lexicon, Nil) {
  list.find(lexicons, fn(lexicon) { lexicon.id == nsid })
}
···11+# server
22+33+[](https://hex.pm/packages/atproto_gleam)
44+[](https://hexdocs.pm/atproto_gleam/)
55+66+```sh
77+gleam add server@1
88+```
99+```gleam
1010+import server
1111+1212+pub fn main() -> Nil {
1313+ // TODO: An example of the project in use
1414+}
1515+```
1616+1717+Further documentation can be found at <https://hexdocs.pm/server>.
1818+1919+## Development
2020+2121+```sh
2222+gleam run # Run the project
2323+gleam test # Run the tests
2424+```
+39
server/gleam.toml
···11+name = "server"
22+version = "1.0.0"
33+44+# Fill out these fields if you intend to generate HTML documentation or publish
55+# your project to the Hex package manager.
66+#
77+# description = ""
88+# licences = ["Apache-2.0"]
99+# repository = { type = "github", user = "", repo = "" }
1010+# links = [{ title = "Website", href = "" }]
1111+#
1212+# For a full reference of all the available options, you can have a look at
1313+# https://gleam.run/writing-gleam/gleam-toml/.
1414+1515+[dependencies]
1616+jetstream = { path = "../jetstream" }
1717+lexicon = { path = "../lexicon" }
1818+graphql = { path = "../graphql" }
1919+lexicon_graphql = { path = "../lexicon_graphql" }
2020+gleam_stdlib = ">= 0.60.0 and < 1.0.0"
2121+mist = ">= 5.0.3 and < 6.0.0"
2222+wisp = ">= 2.1.0 and < 3.0.0"
2323+gleam_erlang = ">= 1.0.0 and < 2.0.0"
2424+gleam_otp = ">= 1.2.0 and < 2.0.0"
2525+gleam_http = ">= 4.0.0 and < 5.0.0"
2626+gleam_json = ">= 3.0.2 and < 4.0.0"
2727+gleam_httpc = ">= 5.0.0 and < 6.0.0"
2828+sqlight = ">= 1.0.0 and < 2.0.0"
2929+gleam_time = ">= 1.4.0 and < 2.0.0"
3030+lustre = ">= 5.0.0 and < 6.0.0"
3131+simplifile = ">= 2.0.0 and < 3.0.0"
3232+argv = ">= 1.0.0 and < 2.0.0"
3333+jose = ">= 1.11.10 and < 2.0.0"
3434+envoy = ">= 1.0.2 and < 2.0.0"
3535+dotenv_gleam = ">= 2.0.1 and < 3.0.0"
3636+thoas = ">= 1.0.0 and < 2.0.0"
3737+3838+[dev-dependencies]
3939+gleeunit = ">= 1.0.0 and < 2.0.0"
+67
server/manifest.toml
···11+# This file was generated by Gleam
22+# You typically do not need to edit this file
33+44+packages = [
55+ { name = "argv", version = "1.0.2", build_tools = ["gleam"], requirements = [], otp_app = "argv", source = "hex", outer_checksum = "BA1FF0929525DEBA1CE67256E5ADF77A7CDDFE729E3E3F57A5BDCAA031DED09D" },
66+ { name = "cowlib", version = "2.16.0", build_tools = ["make", "rebar3"], requirements = [], otp_app = "cowlib", source = "hex", outer_checksum = "7F478D80D66B747344F0EA7708C187645CFCC08B11AA424632F78E25BF05DB51" },
77+ { name = "directories", version = "1.2.0", build_tools = ["gleam"], requirements = ["envoy", "gleam_stdlib", "platform", "simplifile"], otp_app = "directories", source = "hex", outer_checksum = "D13090CFCDF6759B87217E8DDD73A75903A700148A82C1D33799F333E249BF9E" },
88+ { name = "dotenv_gleam", version = "2.0.1", build_tools = ["gleam"], requirements = ["envoy", "gleam_erlang", "gleam_stdlib", "simplifile"], otp_app = "dotenv_gleam", source = "hex", outer_checksum = "47391525F97AF2086B34A4F2E81C1A1102863ACA983540CD87A8D295B1636445" },
99+ { name = "envoy", version = "1.0.2", build_tools = ["gleam"], requirements = ["gleam_stdlib"], otp_app = "envoy", source = "hex", outer_checksum = "95FD059345AA982E89A0B6E2A3BF1CF43E17A7048DCD85B5B65D3B9E4E39D359" },
1010+ { name = "esqlite", version = "0.9.0", build_tools = ["rebar3"], requirements = [], otp_app = "esqlite", source = "hex", outer_checksum = "CCF72258A4EE152EC7AD92AA9A03552EB6CA1B06B65C93AD5B6E55C302E05855" },
1111+ { name = "exception", version = "2.1.0", build_tools = ["gleam"], requirements = ["gleam_stdlib"], otp_app = "exception", source = "hex", outer_checksum = "329D269D5C2A314F7364BD2711372B6F2C58FA6F39981572E5CA68624D291F8C" },
1212+ { name = "filepath", version = "1.1.2", build_tools = ["gleam"], requirements = ["gleam_stdlib"], otp_app = "filepath", source = "hex", outer_checksum = "B06A9AF0BF10E51401D64B98E4B627F1D2E48C154967DA7AF4D0914780A6D40A" },
1313+ { name = "gleam_crypto", version = "1.5.1", build_tools = ["gleam"], requirements = ["gleam_stdlib"], otp_app = "gleam_crypto", source = "hex", outer_checksum = "50774BAFFF1144E7872814C566C5D653D83A3EBF23ACC3156B757A1B6819086E" },
1414+ { name = "gleam_erlang", version = "1.3.0", build_tools = ["gleam"], requirements = ["gleam_stdlib"], otp_app = "gleam_erlang", source = "hex", outer_checksum = "1124AD3AA21143E5AF0FC5CF3D9529F6DB8CA03E43A55711B60B6B7B3874375C" },
1515+ { name = "gleam_http", version = "4.3.0", build_tools = ["gleam"], requirements = ["gleam_stdlib"], otp_app = "gleam_http", source = "hex", outer_checksum = "82EA6A717C842456188C190AFB372665EA56CE13D8559BF3B1DD9E40F619EE0C" },
1616+ { name = "gleam_httpc", version = "5.0.0", build_tools = ["gleam"], requirements = ["gleam_erlang", "gleam_http", "gleam_stdlib"], otp_app = "gleam_httpc", source = "hex", outer_checksum = "C545172618D07811494E97AAA4A0FB34DA6F6D0061FDC8041C2F8E3BE2B2E48F" },
1717+ { name = "gleam_json", version = "3.0.2", build_tools = ["gleam"], requirements = ["gleam_stdlib"], otp_app = "gleam_json", source = "hex", outer_checksum = "874FA3C3BB6E22DD2BB111966BD40B3759E9094E05257899A7C08F5DE77EC049" },
1818+ { name = "gleam_otp", version = "1.2.0", build_tools = ["gleam"], requirements = ["gleam_erlang", "gleam_stdlib"], otp_app = "gleam_otp", source = "hex", outer_checksum = "BA6A294E295E428EC1562DC1C11EA7530DCB981E8359134BEABC8493B7B2258E" },
1919+ { name = "gleam_stdlib", version = "0.65.0", build_tools = ["gleam"], requirements = [], otp_app = "gleam_stdlib", source = "hex", outer_checksum = "7C69C71D8C493AE11A5184828A77110EB05A7786EBF8B25B36A72F879C3EE107" },
2020+ { name = "gleam_time", version = "1.4.0", build_tools = ["gleam"], requirements = ["gleam_stdlib"], otp_app = "gleam_time", source = "hex", outer_checksum = "DCDDC040CE97DA3D2A925CDBBA08D8A78681139745754A83998641C8A3F6587E" },
2121+ { name = "gleam_yielder", version = "1.1.0", build_tools = ["gleam"], requirements = ["gleam_stdlib"], otp_app = "gleam_yielder", source = "hex", outer_checksum = "8E4E4ECFA7982859F430C57F549200C7749823C106759F4A19A78AEA6687717A" },
2222+ { name = "gleeunit", version = "1.6.1", build_tools = ["gleam"], requirements = ["gleam_stdlib"], otp_app = "gleeunit", source = "hex", outer_checksum = "FDC68A8C492B1E9B429249062CD9BAC9B5538C6FBF584817205D0998C42E1DAC" },
2323+ { name = "glisten", version = "8.0.1", build_tools = ["gleam"], requirements = ["gleam_erlang", "gleam_otp", "gleam_stdlib", "logging", "telemetry"], otp_app = "glisten", source = "hex", outer_checksum = "534BB27C71FB9E506345A767C0D76B17A9E9199934340C975DC003C710E3692D" },
2424+ { name = "gramps", version = "6.0.0", build_tools = ["gleam"], requirements = ["gleam_crypto", "gleam_erlang", "gleam_http", "gleam_stdlib"], otp_app = "gramps", source = "hex", outer_checksum = "8B7195978FBFD30B43DF791A8A272041B81E45D245314D7A41FC57237AA882A0" },
2525+ { name = "graphql", version = "1.0.0", build_tools = ["gleam"], requirements = ["gleam_stdlib"], source = "local", path = "../graphql" },
2626+ { name = "gun", version = "2.2.0", build_tools = ["make", "rebar3"], requirements = ["cowlib"], otp_app = "gun", source = "hex", outer_checksum = "76022700C64287FEB4DF93A1795CFF6741B83FB37415C40C34C38D2A4645261A" },
2727+ { name = "houdini", version = "1.2.0", build_tools = ["gleam"], requirements = [], otp_app = "houdini", source = "hex", outer_checksum = "5DB1053F1AF828049C2B206D4403C18970ABEF5C18671CA3C2D2ED0DD64F6385" },
2828+ { name = "hpack_erl", version = "0.3.0", build_tools = ["rebar3"], requirements = [], otp_app = "hpack", source = "hex", outer_checksum = "D6137D7079169D8C485C6962DFE261AF5B9EF60FBC557344511C1E65E3D95FB0" },
2929+ { name = "jetstream", version = "1.0.0", build_tools = ["gleam"], requirements = ["gleam_erlang", "gleam_http", "gleam_json", "gleam_stdlib", "gun"], source = "local", path = "../jetstream" },
3030+ { name = "jose", version = "1.11.10", build_tools = ["mix", "rebar3"], requirements = [], otp_app = "jose", source = "hex", outer_checksum = "0D6CD36FF8BA174DB29148FC112B5842186B68A90CE9FC2B3EC3AFE76593E614" },
3131+ { name = "lexicon", version = "0.1.0", build_tools = ["gleam"], requirements = ["gleam_json", "gleam_stdlib"], source = "local", path = "../lexicon" },
3232+ { name = "lexicon_graphql", version = "1.0.0", build_tools = ["gleam"], requirements = ["gleam_json", "gleam_stdlib", "graphql"], source = "local", path = "../lexicon_graphql" },
3333+ { name = "logging", version = "1.3.0", build_tools = ["gleam"], requirements = ["gleam_stdlib"], otp_app = "logging", source = "hex", outer_checksum = "1098FBF10B54B44C2C7FDF0B01C1253CAFACDACABEFB4B0D027803246753E06D" },
3434+ { name = "lustre", version = "5.3.5", build_tools = ["gleam"], requirements = ["gleam_erlang", "gleam_json", "gleam_otp", "gleam_stdlib", "houdini"], otp_app = "lustre", source = "hex", outer_checksum = "5CBB5DD2849D8316A2101792FC35AEB58CE4B151451044A9C2A2A70A2F7FCEB8" },
3535+ { name = "marceau", version = "1.3.0", build_tools = ["gleam"], requirements = [], otp_app = "marceau", source = "hex", outer_checksum = "2D1C27504BEF45005F5DFB18591F8610FB4BFA91744878210BDC464412EC44E9" },
3636+ { name = "mist", version = "5.0.3", build_tools = ["gleam"], requirements = ["exception", "gleam_erlang", "gleam_http", "gleam_otp", "gleam_stdlib", "gleam_yielder", "glisten", "gramps", "hpack_erl", "logging"], otp_app = "mist", source = "hex", outer_checksum = "7C4BE717A81305323C47C8A591E6B9BA4AC7F56354BF70B4D3DF08CC01192668" },
3737+ { name = "platform", version = "1.0.0", build_tools = ["gleam"], requirements = [], otp_app = "platform", source = "hex", outer_checksum = "8339420A95AD89AAC0F82F4C3DB8DD401041742D6C3F46132A8739F6AEB75391" },
3838+ { name = "simplifile", version = "2.3.0", build_tools = ["gleam"], requirements = ["filepath", "gleam_stdlib"], otp_app = "simplifile", source = "hex", outer_checksum = "0A868DAC6063D9E983477981839810DC2E553285AB4588B87E3E9C96A7FB4CB4" },
3939+ { name = "sqlight", version = "1.0.3", build_tools = ["gleam"], requirements = ["esqlite", "gleam_stdlib"], otp_app = "sqlight", source = "hex", outer_checksum = "CADD79663C9B61D4BAC960A47CC2D42CA8F48EAF5804DBEB79977287750F4B16" },
4040+ { name = "telemetry", version = "1.3.0", build_tools = ["rebar3"], requirements = [], otp_app = "telemetry", source = "hex", outer_checksum = "7015FC8919DBE63764F4B4B87A95B7C0996BD539E0D499BE6EC9D7F3875B79E6" },
4141+ { name = "thoas", version = "1.2.1", build_tools = ["rebar3"], requirements = [], otp_app = "thoas", source = "hex", outer_checksum = "E38697EDFFD6E91BD12CEA41B155115282630075C2A727E7A6B2947F5408B86A" },
4242+ { name = "wisp", version = "2.1.0", build_tools = ["gleam"], requirements = ["directories", "exception", "filepath", "gleam_crypto", "gleam_erlang", "gleam_http", "gleam_json", "gleam_stdlib", "houdini", "logging", "marceau", "mist", "simplifile"], otp_app = "wisp", source = "hex", outer_checksum = "362BDDD11BF48EB38CDE51A73BC7D1B89581B395CA998E3F23F11EC026151C54" },
4343+]
4444+
4545+[requirements]
4646+argv = { version = ">= 1.0.0 and < 2.0.0" }
4747+dotenv_gleam = { version = ">= 2.0.1 and < 3.0.0" }
4848+envoy = { version = ">= 1.0.2 and < 2.0.0" }
4949+gleam_erlang = { version = ">= 1.0.0 and < 2.0.0" }
5050+gleam_http = { version = ">= 4.0.0 and < 5.0.0" }
5151+gleam_httpc = { version = ">= 5.0.0 and < 6.0.0" }
5252+gleam_json = { version = ">= 3.0.2 and < 4.0.0" }
5353+gleam_otp = { version = ">= 1.2.0 and < 2.0.0" }
5454+gleam_stdlib = { version = ">= 0.60.0 and < 1.0.0" }
5555+gleam_time = { version = ">= 1.4.0 and < 2.0.0" }
5656+gleeunit = { version = ">= 1.0.0 and < 2.0.0" }
5757+graphql = { path = "../graphql" }
5858+jetstream = { path = "../jetstream" }
5959+jose = { version = ">= 1.11.10 and < 2.0.0" }
6060+lexicon = { path = "../lexicon" }
6161+lexicon_graphql = { path = "../lexicon_graphql" }
6262+lustre = { version = ">= 5.0.0 and < 6.0.0" }
6363+mist = { version = ">= 5.0.3 and < 6.0.0" }
6464+simplifile = { version = ">= 2.0.0 and < 3.0.0" }
6565+sqlight = { version = ">= 1.0.0 and < 2.0.0" }
6666+thoas = { version = ">= 1.0.0 and < 2.0.0" }
6767+wisp = { version = ">= 2.1.0 and < 3.0.0" }