···8686- ✅ **Circular Dependency Detection**: prevents infinite reference loops
8787- ✅ **Detailed Error Messages**: validation errors with path information
88888989+## CLI Usage
9090+9191+Validate lexicon files from the command line:
9292+9393+```sh
9494+# Validate a single file
9595+gleam run -m honk check ./lexicons/xyz/statusphere/status.json
9696+9797+# Validate all .json files in a directory
9898+gleam run -m honk check ./lexicons/
9999+100100+# Show help
101101+gleam run -m honk help
102102+```
When validating a directory, all lexicons are loaded together to resolve cross-lexicon references.
105105+89106## API Overview
9010791108### Main Functions
+2
gleam.toml
···88gleam_json = ">= 3.0.0 and < 4.0.0"
99gleam_regexp = ">= 1.0.0 and < 2.0.0"
1010gleam_time = ">= 1.5.0 and < 2.0.0"
1111+simplifile = ">= 2.3.1 and < 3.0.0"
1212+argv = ">= 1.0.2 and < 2.0.0"
11131214[dev-dependencies]
1315gleeunit = ">= 1.0.0 and < 2.0.0"
+5
manifest.toml
···22# You typically do not need to edit this file
3344packages = [
55+ { name = "argv", version = "1.0.2", build_tools = ["gleam"], requirements = [], otp_app = "argv", source = "hex", outer_checksum = "BA1FF0929525DEBA1CE67256E5ADF77A7CDDFE729E3E3F57A5BDCAA031DED09D" },
66+ { name = "filepath", version = "1.1.2", build_tools = ["gleam"], requirements = ["gleam_stdlib"], otp_app = "filepath", source = "hex", outer_checksum = "B06A9AF0BF10E51401D64B98E4B627F1D2E48C154967DA7AF4D0914780A6D40A" },
57 { name = "gleam_json", version = "3.1.0", build_tools = ["gleam"], requirements = ["gleam_stdlib"], otp_app = "gleam_json", source = "hex", outer_checksum = "44FDAA8847BE8FC48CA7A1C089706BD54BADCC4C45B237A992EDDF9F2CDB2836" },
68 { name = "gleam_regexp", version = "1.1.1", build_tools = ["gleam"], requirements = ["gleam_stdlib"], otp_app = "gleam_regexp", source = "hex", outer_checksum = "9C215C6CA84A5B35BB934A9B61A9A306EC743153BE2B0425A0D032E477B062A9" },
79 { name = "gleam_stdlib", version = "0.65.0", build_tools = ["gleam"], requirements = [], otp_app = "gleam_stdlib", source = "hex", outer_checksum = "7C69C71D8C493AE11A5184828A77110EB05A7786EBF8B25B36A72F879C3EE107" },
810 { name = "gleam_time", version = "1.5.0", build_tools = ["gleam"], requirements = ["gleam_stdlib"], otp_app = "gleam_time", source = "hex", outer_checksum = "D560E672C7279C89908981E068DF07FD16D0C859DCA266F908B18F04DF0EB8E6" },
911 { name = "gleeunit", version = "1.9.0", build_tools = ["gleam"], requirements = ["gleam_stdlib"], otp_app = "gleeunit", source = "hex", outer_checksum = "DA9553CE58B67924B3C631F96FE3370C49EB6D6DC6B384EC4862CC4AAA718F3C" },
1212+ { name = "simplifile", version = "2.3.1", build_tools = ["gleam"], requirements = ["filepath", "gleam_stdlib"], otp_app = "simplifile", source = "hex", outer_checksum = "957E0E5B75927659F1D2A1B7B75D7B9BA96FAA8D0C53EA71C4AD9CD0C6B848F6" },
1013]
11141215[requirements]
1616+argv = { version = ">= 1.0.2 and < 2.0.0" }
1317gleam_json = { version = ">= 3.0.0 and < 4.0.0" }
1418gleam_regexp = { version = ">= 1.0.0 and < 2.0.0" }
1519gleam_stdlib = { version = ">= 0.44.0 and < 2.0.0" }
1620gleam_time = { version = ">= 1.5.0 and < 2.0.0" }
1721gleeunit = { version = ">= 1.0.0 and < 2.0.0" }
2222+simplifile = { version = ">= 2.3.1 and < 3.0.0" }
+285-23
src/honk.gleam
···11// Main public API for the ATProtocol lexicon validator
2233+import argv
34import gleam/dict.{type Dict}
55+import gleam/dynamic/decode
66+import gleam/int
77+import gleam/io
48import gleam/json.{type Json}
59import gleam/list
610import gleam/option.{None, Some}
711import gleam/result
1212+import gleam/string
813import honk/errors
914import honk/internal/json_helpers
1015import honk/types
1116import honk/validation/context
1217import honk/validation/formats
1818+import simplifile
13191420// Import validators
1521import honk/validation/field as validation_field
···6773 Ok(_) -> errors_acc
6874 Error(e) -> {
6975 // Include def name in error for better context
7070- let error_msg =
7171- lex_id
7272- <> "#"
7373- <> def_name
7474- <> ": "
7575- <> errors.to_string(e)
7676+ // Extract just the message without wrapper text
7777+ let message = case e {
7878+ errors.InvalidSchema(msg) -> msg
7979+ errors.DataValidation(msg) -> msg
8080+ errors.LexiconNotFound(msg) -> "Lexicon not found: " <> msg
8181+ }
8282+ // Clean up leading ": " if present
8383+ let clean_message = case string.starts_with(message, ": ") {
8484+ True -> string.drop_start(message, 2)
8585+ False -> message
8686+ }
8787+ let error_msg = lex_id <> "#" <> def_name <> ": " <> clean_message
7688 case dict.get(errors_acc, lex_id) {
7789 Ok(existing_errors) ->
7890 dict.insert(errors_acc, lex_id, [
···185197 }
186198}
/// CLI entry point for the honk lexicon validator
///
/// Usage:
///   gleam run -m honk check <path>
///   gleam run -m honk help
pub fn main() -> Nil {
  let arguments = argv.load().arguments
  case arguments {
    ["help"] | [] -> show_help()
    ["check", path] -> validate_path(path)
    _ -> {
      io.println_error("Unknown command. Use 'help' for usage information.")
      Nil
    }
  }
}
/// Validate a path, dispatching to file or directory validation
/// depending on what the path points at.
fn validate_path(path: String) -> Nil {
  // Shared handler for any filesystem access failure on `path`.
  let report_access_error = fn() {
    io.println_error("Error: Cannot access path: " <> path)
    Nil
  }
  case simplifile.is_file(path) {
    Error(_) -> report_access_error()
    Ok(True) -> validate_file(path)
    Ok(False) ->
      case simplifile.is_directory(path) {
        Error(_) -> report_access_error()
        Ok(True) -> validate_directory(path)
        Ok(False) -> {
          io.println_error(
            "Error: Path is neither a file nor a directory: " <> path,
          )
          Nil
        }
      }
  }
}
/// Validate a single lexicon file and print the result to the console.
fn validate_file(file_path: String) -> Nil {
  let outcome = read_and_validate_file(file_path)
  case outcome {
    Ok(_) -> io.println("✓ " <> file_path <> " - valid")
    Error(msg) -> {
      io.println_error("✗ " <> file_path)
      io.println_error("  " <> msg)
    }
  }
}
/// Validate all .json files in a directory (recursive).
///
/// All successfully parsed lexicons are validated together as one
/// catalog so that cross-lexicon references can be resolved. Prints a
/// per-file ✓/✗ line and a closing summary.
fn validate_directory(dir_path: String) -> Nil {
  case simplifile.get_files(dir_path) {
    Error(_) -> {
      io.println_error("Error: Cannot read directory: " <> dir_path)
      Nil
    }
    Ok(all_files) -> {
      // Only .json files are candidate lexicons.
      let json_files =
        list.filter(all_files, fn(path) { string.ends_with(path, ".json") })
      case json_files {
        [] -> {
          io.println("No .json files found in " <> dir_path)
          Nil
        }
        files -> {
          // Read and parse every file up front.
          let file_results =
            list.map(files, fn(file) { #(file, read_json_file(file)) })

          // Split read/parse failures from successfully parsed lexicons.
          let #(parse_errors, parsed_files) =
            list.partition(file_results, fn(result) {
              case result {
                #(_, Error(_)) -> True
                #(_, Ok(_)) -> False
              }
            })

          // Report files that could not be read or parsed.
          list.each(parse_errors, fn(result) {
            case result {
              #(file, Error(msg)) -> {
                io.println_error("✗ " <> file)
                io.println_error("  " <> msg)
              }
              _ -> Nil
            }
          })

          let lexicons =
            list.filter_map(parsed_files, fn(result) {
              case result {
                #(_, Ok(json)) -> Ok(json)
                _ -> Error(Nil)
              }
            })

          // Validate ONCE and reuse the result for both the per-file
          // report and the summary. (Previously `validate` ran a second
          // time just to count errors for the summary line.)
          let validation_result = validate(lexicons)
          report_parsed_files(parsed_files, validation_result)

          let validation_error_count = case validation_result {
            Ok(_) -> 0
            Error(error_map) -> dict.size(error_map)
          }
          let total_errors =
            list.length(parse_errors) + validation_error_count
          print_directory_summary(list.length(files), total_errors)
        }
      }
    }
  }
}

/// Print a ✓/✗ line for every successfully parsed file, looking up
/// validation errors by the file's lexicon id.
fn report_parsed_files(
  parsed_files: List(#(String, Result(Json, String))),
  validation_result: Result(a, Dict(String, List(String))),
) -> Nil {
  list.each(parsed_files, fn(result) {
    case result {
      #(file, Ok(json)) -> report_parsed_file(file, json, validation_result)
      _ -> Nil
    }
  })
}

/// Report one parsed file: valid, missing its lexicon id, or carrying
/// validation errors found under its id in the error map.
fn report_parsed_file(
  file: String,
  json: Json,
  validation_result: Result(a, Dict(String, List(String))),
) -> Nil {
  case validation_result {
    Ok(_) -> io.println("✓ " <> file)
    Error(error_map) ->
      case json_helpers.get_string(json, "id") {
        None -> {
          io.println_error("✗ " <> file)
          io.println_error("  Missing lexicon id")
        }
        Some(lex_id) ->
          case dict.get(error_map, lex_id) {
            Error(_) -> io.println("✓ " <> file)
            Ok(file_errors) -> {
              io.println_error("✗ " <> file)
              list.each(file_errors, fn(err) { io.println_error("  " <> err) })
            }
          }
      }
  }
}

/// Print the closing summary line for a directory validation run.
fn print_directory_summary(total: Int, total_errors: Int) -> Nil {
  case total_errors {
    0 ->
      io.println(
        "\nAll " <> int.to_string(total) <> " schemas validated successfully.",
      )
    _ ->
      io.println_error(
        "\n"
        <> int.to_string(total_errors)
        <> " of "
        <> int.to_string(total)
        <> " schemas failed validation.",
      )
  }
}
/// Read a file from disk and parse its contents as JSON.
/// No lexicon validation is performed here.
fn read_json_file(file_path: String) -> Result(Json, String) {
  case simplifile.read(file_path) {
    Error(_) -> Error("Cannot read file")
    Ok(content) ->
      case json.parse(content, decode.dynamic) {
        Error(_) -> Error("Invalid JSON")
        Ok(json_dynamic) ->
          case json_helpers.dynamic_to_json(json_dynamic) {
            Error(_) -> Error("Failed to convert JSON")
            Ok(json_value) -> Ok(json_value)
          }
      }
  }
}
/// Read a file, parse it as JSON, and validate it as a single lexicon.
///
/// Returns `Error` with a human-readable message on the first failure
/// (read, parse, JSON conversion, or validation).
fn read_and_validate_file(file_path: String) -> Result(Nil, String) {
  // Reuse the shared read/parse helper instead of duplicating the
  // read → parse → convert pipeline inline (previously copy-pasted
  // from read_json_file).
  use json_value <- result.try(read_json_file(file_path))
  use _ <- result.try(
    validate([json_value])
    |> result.map_error(format_validation_errors),
  )
  Ok(Nil)
}
/// Flatten a validation error map into one indented, newline-joined
/// message string (lexicon ids are dropped; only messages remain).
fn format_validation_errors(error_map: Dict(String, List(String))) -> String {
  dict.to_list(error_map)
  |> list.map(fn(entry) { string.join(entry.1, "\n  ") })
  |> string.join("\n  ")
}
/// Print CLI usage information to stdout.
fn show_help() -> Nil {
  let help_text =
    "
honk - ATProtocol Lexicon Validator

USAGE:
  gleam run -m honk check <path>
  gleam run -m honk help

COMMANDS:
  check <path>   Check a lexicon file or directory
                 - If <path> is a file: validates that single lexicon
                 - If <path> is a directory: recursively validates all .json files

  help           Show this help message

EXAMPLES:
  gleam run -m honk check ./lexicons/xyz/statusphere/status.json
  gleam run -m honk check ./lexicons

VALIDATION:
  - Validates lexicon structure (id, defs)
  - Validates ALL definitions in each lexicon
  - Checks types, constraints, and references
  - Reports errors with definition context (lex.id#defName)
"
  io.println(help_text)
}
+32-1
src/honk/validation/field/reference.gleam
···4040 use ref_str <- result.try(ref_value)
41414242 // Validate reference syntax
4343- validate_ref_syntax(ref_str, def_name)
4343+ use _ <- result.try(validate_ref_syntax(ref_str, def_name))
4444+4545+ // Validate that the reference can be resolved (only for global refs with full context)
4646+ case string.starts_with(ref_str, "#") {
4747+ True -> Ok(Nil) // Local ref - will be validated in same lexicon
4848+ False -> {
4949+ // Global ref - check it exists in catalog if we have a current lexicon
5050+ case context.current_lexicon_id(ctx) {
5151+ Some(lex_id) -> {
5252+ // We have a full validation context, so validate reference resolution
5353+ use resolved <- result.try(resolution.resolve_reference(
5454+ ref_str,
5555+ ctx,
5656+ lex_id,
5757+ ))
5858+5959+ case resolved {
6060+ Some(_) -> Ok(Nil)
6161+ None ->
6262+ Error(errors.invalid_schema(
6363+ def_name <> ": reference not found: " <> ref_str,
6464+ ))
6565+ }
6666+ }
6767+ None -> {
6868+ // No current lexicon (e.g., unit test context)
6969+ // Just validate syntax, can't check if reference exists
7070+ Ok(Nil)
7171+ }
7272+ }
7373+ }
7474+ }
4475}
45764677/// Validates data against the referenced schema