Highly ambitious ATProtocol AppView service and sdks

simplify lexicon intellisense impl, fix various validation issues, improve error formatting

+1310 -501
+40
deno.lock
··· 649 649 } 650 650 }, 651 651 "remote": { 652 + "https://deno.land/std@0.200.0/_util/os.ts": "d932f56d41e4f6a6093d56044e29ce637f8dcc43c5a90af43504a889cf1775e3", 653 + "https://deno.land/std@0.200.0/assert/assert.ts": "9a97dad6d98c238938e7540736b826440ad8c1c1e54430ca4c4e623e585607ee", 654 + "https://deno.land/std@0.200.0/assert/assertion_error.ts": "4d0bde9b374dfbcbe8ac23f54f567b77024fb67dbb1906a852d67fe050d42f56", 655 + "https://deno.land/std@0.200.0/fs/_util.ts": "fbf57dcdc9f7bc8128d60301eece608246971a7836a3bb1e78da75314f08b978", 656 + "https://deno.land/std@0.200.0/fs/walk.ts": "b93f21694711ac5bed9297a7c5a90ed7669c52c4aab1bf6166dd8fe9655bb158", 657 + "https://deno.land/std@0.200.0/path/_basename.ts": "057d420c9049821f983f784fd87fa73ac471901fb628920b67972b0f44319343", 658 + "https://deno.land/std@0.200.0/path/_constants.ts": "e49961f6f4f48039c0dfed3c3f93e963ca3d92791c9d478ac5b43183413136e0", 659 + "https://deno.land/std@0.200.0/path/_dirname.ts": "355e297236b2218600aee7a5301b937204c62e12da9db4b0b044993d9e658395", 660 + "https://deno.land/std@0.200.0/path/_extname.ts": "eaaa5aae1acf1f03254d681bd6a8ce42a9cb5b7ff2213a9d4740e8ab31283664", 661 + "https://deno.land/std@0.200.0/path/_format.ts": "4a99270d6810f082e614309164fad75d6f1a483b68eed97c830a506cc589f8b4", 662 + "https://deno.land/std@0.200.0/path/_from_file_url.ts": "7e4e5626089785adddb061f1b9f4932d6b21c7df778e7449531a11e32048245c", 663 + "https://deno.land/std@0.200.0/path/_interface.ts": "6471159dfbbc357e03882c2266d21ef9afdb1e4aa771b0545e90db58a0ba314b", 664 + "https://deno.land/std@0.200.0/path/_is_absolute.ts": "05dac10b5e93c63198b92e3687baa2be178df5321c527dc555266c0f4f51558c", 665 + "https://deno.land/std@0.200.0/path/_join.ts": "fd78555bc34d5f188918fc7018dfe8fe2df5bbad94a3b30a433666c03934d77f", 666 + "https://deno.land/std@0.200.0/path/_normalize.ts": "a19ec8706b2707f9dd974662a5cd89fad438e62ab1857e08b314a8eb49a34d81", 667 + "https://deno.land/std@0.200.0/path/_parse.ts": 
"0f9b0ff43682dd9964eb1c4398610c4e165d8db9d3ac9d594220217adf480cfa", 668 + "https://deno.land/std@0.200.0/path/_relative.ts": "27bdeffb5311a47d85be26d37ad1969979359f7636c5cd9fcf05dcd0d5099dc5", 669 + "https://deno.land/std@0.200.0/path/_resolve.ts": "7a3616f1093735ed327e758313b79c3c04ea921808ca5f19ddf240cb68d0adf6", 670 + "https://deno.land/std@0.200.0/path/_to_file_url.ts": "739bfda583598790b2e77ce227f2bb618f6ebdb939788cea47555b43970ec58c", 671 + "https://deno.land/std@0.200.0/path/_to_namespaced_path.ts": "0d5f4caa2ed98ef7a8786286df6af804b50e38859ae897b5b5b4c8c5930a75c8", 672 + "https://deno.land/std@0.200.0/path/_util.ts": "4e191b1bac6b3bf0c31aab42e5ca2e01a86ab5a0d2e08b75acf8585047a86221", 673 + "https://deno.land/std@0.200.0/path/basename.ts": "6f08fbb90dbfcf320765b3abb01f995b1723f75e2534acfd5380e202c802a3aa", 674 + "https://deno.land/std@0.200.0/path/common.ts": "ee7505ab01fd22de3963b64e46cff31f40de34f9f8de1fff6a1bd2fe79380000", 675 + "https://deno.land/std@0.200.0/path/dirname.ts": "098996822a31b4c46e1eb52a19540d3c6f9f54b772fc8a197939eeabc29fca2f", 676 + "https://deno.land/std@0.200.0/path/extname.ts": "9b83c62fd16505739541f7a3ab447d8972da39dbf668d47af2f93206c2480893", 677 + "https://deno.land/std@0.200.0/path/format.ts": "cb22f95cc7853d590b87708cc9441785e760d711188facff3d225305a8213aca", 678 + "https://deno.land/std@0.200.0/path/from_file_url.ts": "a6221cfc928928ec4d9786d767dfac98fa2ab746af0786446c9834a07b98817e", 679 + "https://deno.land/std@0.200.0/path/glob.ts": "d479e0a695621c94d3fd7fe7abd4f9499caf32a8de13f25073451c6ef420a4e1", 680 + "https://deno.land/std@0.200.0/path/is_absolute.ts": "6b3d36352eb7fa29edb53f9e7b09b1aeb022a3c5465764f6cc5b8c41f9736197", 681 + "https://deno.land/std@0.200.0/path/join.ts": "4a2867ff2f3c81ffc9eb3d56dade16db6f8bd3854f269306d23dad4115089c84", 682 + "https://deno.land/std@0.200.0/path/mod.ts": "7765507696cb321994cdacfc19ee3ba61e8e3ebf4bd98fa75a276cf5dc18ce2a", 683 + "https://deno.land/std@0.200.0/path/normalize.ts": 
"7d992cd262b2deefa842d93a8ba2ed51f3949ba595b1d07f627ac2cddbc74808", 684 + "https://deno.land/std@0.200.0/path/parse.ts": "031fe488b3497fb8312fc1dc3c3d6c2d80707edd9c661e18ee9fd20f95edf322", 685 + "https://deno.land/std@0.200.0/path/posix.ts": "0a1c1952d132323a88736d03e92bd236f3ed5f9f079e5823fae07c8d978ee61b", 686 + "https://deno.land/std@0.200.0/path/relative.ts": "7db80c5035016174267da16321a742d76e875215c317859a383b12f413c6f5d6", 687 + "https://deno.land/std@0.200.0/path/resolve.ts": "103b62207726a27f28177f397008545804ecb20aaf00623af1f622b18cd80b9f", 688 + "https://deno.land/std@0.200.0/path/separator.ts": "0fb679739d0d1d7bf45b68dacfb4ec7563597a902edbaf3c59b50d5bcadd93b1", 689 + "https://deno.land/std@0.200.0/path/to_file_url.ts": "dd32f7a01bbf3b15b5df46796659984b372973d9b2d7d59bcf0eb990763a0cb5", 690 + "https://deno.land/std@0.200.0/path/to_namespaced_path.ts": "4e643ab729bf49ccdc166ad48d2de262ff462938fcf2a44a4425588f4a0bd690", 691 + "https://deno.land/std@0.200.0/path/win32.ts": "8b3f80ef7a462511d5e8020ff490edcaa0a0d118f1b1e9da50e2916bdd73f9dd", 652 692 "https://deno.land/std@0.208.0/assert/_constants.ts": "8a9da298c26750b28b326b297316cdde860bc237533b07e1337c021379e6b2a9", 653 693 "https://deno.land/std@0.208.0/assert/_diff.ts": "58e1461cc61d8eb1eacbf2a010932bf6a05b79344b02ca38095f9b805795dc48", 654 694 "https://deno.land/std@0.208.0/assert/_format.ts": "a69126e8a469009adf4cf2a50af889aca364c349797e63174884a52ff75cf4c7",
+43 -97
packages/cli/src/utils/lexicon.ts
··· 1 1 import { walk } from "@std/fs/walk"; 2 2 import { extname } from "@std/path"; 3 - import { green, red, dim } from "@std/fmt/colors"; 4 - import { LexiconValidator, type LexiconDoc } from "@slices/lexicon"; 3 + import { green, red, dim, cyan } from "@std/fmt/colors"; 4 + import { type LexiconDoc, validateLexiconsAndGetErrors } from "@slices/lexicon"; 5 5 import { logger } from "./logger.ts"; 6 6 7 7 // Type for raw lexicon content that may include unknown fields ··· 81 81 }; 82 82 } 83 83 84 - // Use @slices/lexicon for validation 85 - // Pass the original lexicon to preserve all fields for validation 86 - try { 87 - const validator = await LexiconValidator.create([lexicon as RawLexicon]); 88 - try { 89 - validator.validateLexiconSetCompleteness(); 90 - validator.free(); // Clean up resources 91 - return { valid: true }; 92 - } catch (validationError) { 93 - validator.free(); // Clean up resources 94 - const errorMessage = validationError instanceof Error ? validationError.message : String(validationError); 95 - return { valid: false, errors: [errorMessage] }; 96 - } 97 - } catch (validationError) { 98 - // Lexicon validation failed - report the actual error 99 - const errorMessage = validationError instanceof Error ? 
validationError.message : String(validationError); 100 - return { valid: false, errors: [errorMessage] }; 84 + // Use static validation method 85 + const errorsByLexiconId = await validateLexiconsAndGetErrors([ 86 + lexicon as RawLexicon, 87 + ]); 88 + const errors = errorsByLexiconId.get(lex.id as string); 89 + 90 + if (errors && errors.length > 0) { 91 + return { valid: false, errors }; 101 92 } 93 + 94 + return { valid: true }; 102 95 } catch (error) { 103 96 const err = error as Error; 104 97 return { valid: false, errors: [`Validation error: ${err.message}`] }; ··· 110 103 showProgress = true 111 104 ): Promise<LexiconValidationResult> { 112 105 const files: LexiconFile[] = []; 113 - const validLexicons: RawLexicon[] = []; // Store valid original lexicon objects 106 + const lexicons: RawLexicon[] = []; 114 107 115 - // First pass: read and parse all files 108 + // Read and parse all files 116 109 for (let i = 0; i < filePaths.length; i++) { 117 110 const filePath = filePaths[i]; 118 111 ··· 159 152 continue; 160 153 } 161 154 162 - // Validate individual lexicon first 163 - // Pass the original content to preserve all fields for validation 164 - try { 165 - const individualValidator = await LexiconValidator.create([content as RawLexicon]); 166 - individualValidator.free(); // Clean up immediately 167 - 168 - validLexicons.push(content as RawLexicon); // Store valid original content 169 - files.push({ 170 - path: filePath, 171 - content, 172 - valid: true, 173 - errors: [], 174 - }); 175 - } catch (validationError) { 176 - // Individual lexicon validation failed 177 - const errorMessage = validationError instanceof Error ? 
validationError.message : String(validationError); 178 - files.push({ 179 - path: filePath, 180 - content, 181 - valid: false, 182 - errors: [errorMessage], 183 - }); 184 - } 155 + // Store for validation 156 + lexicons.push(content as RawLexicon); 157 + files.push({ 158 + path: filePath, 159 + content, 160 + valid: true, 161 + errors: [], 162 + }); 185 163 } catch (error) { 186 164 const err = error as Error; 187 165 files.push({ ··· 193 171 } 194 172 } 195 173 196 - // Second pass: validate the complete set together (only for individually valid files) 197 - if (validLexicons.length > 0) { 174 + // Validate all lexicons together (structural + cross-reference in one pass) 175 + if (lexicons.length > 0) { 198 176 try { 199 - const validator = await LexiconValidator.create(validLexicons); 200 - try { 201 - validator.validateLexiconSetCompleteness(); 202 - validator.free(); 203 - // All lexicons in the set are valid 204 - } catch (validationError) { 205 - validator.free(); 206 - const errorMessage = validationError instanceof Error ? validationError.message : String(validationError); 177 + const errorsByLexiconId = await validateLexiconsAndGetErrors(lexicons); 207 178 208 - // Mark all individually valid files as invalid due to set validation failure 209 - for (const file of files) { 210 - if (file.valid) { 179 + // Map errors back to files 180 + for (const file of files) { 181 + if (file.valid && file.content) { 182 + const lexicon = file.content as RawLexicon; 183 + const errors = errorsByLexiconId.get(lexicon.id); 184 + if (errors && errors.length > 0) { 211 185 file.valid = false; 212 - file.errors = [`Set validation failed: ${errorMessage}`]; 186 + file.errors = errors; 213 187 } 214 188 } 215 189 } 216 - } catch (validationError) { 217 - // This shouldn't happen since we already validated individual lexicons 218 - // But if it does, it's likely a set-level issue 219 - const errorMessage = validationError instanceof Error ? 
validationError.message : String(validationError); 220 - 221 - // Mark only individually valid files as invalid due to set creation failure 190 + } catch (error) { 191 + const errorMessage = 192 + error instanceof Error ? error.message : String(error); 193 + // Mark all valid files as invalid due to validation failure 222 194 for (const file of files) { 223 195 if (file.valid) { 224 196 file.valid = false; 225 - file.errors = [`Set creation failed: ${errorMessage}`]; 197 + file.errors = [`Validation failed: ${errorMessage}`]; 226 198 } 227 199 } 228 200 } 229 201 } 230 202 231 - const validFiles = files.filter(f => f.valid).length; 203 + const validFiles = files.filter((f) => f.valid).length; 232 204 const invalidFiles = files.length - validFiles; 233 205 234 206 return { ··· 239 211 }; 240 212 } 241 213 242 - // ANSI color codes 243 - const colors = { 244 - reset: '\x1b[0m', 245 - red: '\x1b[31m', 246 - green: '\x1b[32m', 247 - yellow: '\x1b[33m', 248 - blue: '\x1b[34m', 249 - magenta: '\x1b[35m', 250 - cyan: '\x1b[36m', 251 - dim: '\x1b[2m', 252 - }; 253 - 254 214 function colorizeErrorPaths(errorMessage: string): string { 255 215 // Highlight field paths in quotes with cyan color 256 - return errorMessage.replace(/'([^']+)'/g, `${colors.cyan}'$1'${colors.reset}`); 216 + return errorMessage.replace( 217 + /'([^']+)'/g, 218 + (_match, p1) => cyan(`'${p1}'`) 219 + ); 257 220 } 258 221 259 222 function formatError(error: string, index: number): string { 260 - // Handle "Multiple validation errors:" by extracting individual errors 261 - if (error.includes("Multiple validation errors:")) { 262 - const lines = error.split('\n'); 263 - const errors: string[] = []; 264 - 265 - for (const line of lines) { 266 - const trimmed = line.trim(); 267 - if (trimmed.startsWith('- ')) { 268 - errors.push(trimmed.substring(2)); // Remove "- " prefix 269 - } 270 - } 271 - 272 - return errors.map((err, i) => 273 - ` ${colors.red}${index + 1}.${i + 1}${colors.reset} 
${colorizeErrorPaths(err)}` 274 - ).join('\n'); 275 - } else { 276 - return ` ${colors.red}${index + 1}.${colors.reset} ${colorizeErrorPaths(error)}`; 277 - } 223 + return ` ${red(`${index + 1}.`)} ${colorizeErrorPaths(error)}`; 278 224 } 279 225 280 226 export function printValidationSummary(result: LexiconValidationResult): void {
+30 -15
packages/lexicon-intellisense/README.md
··· 1 1 # Lexicon IntelliSense 2 2 3 - VS Code extension providing IntelliSense support for AT Protocol lexicon JSON files using the `@slices/lexicon` validation library. 3 + VS Code extension providing IntelliSense support for AT Protocol lexicon JSON 4 + files. 4 5 5 6 ## Features 6 7 7 - - **Real-time validation**: Validates lexicon files as you type using the WASM-based `@slices/lexicon` validator 8 - - **JSON Schema support**: Provides autocomplete and validation using comprehensive JSON schemas 9 - - **Cross-lexicon validation**: Validates references between lexicon files in your workspace 10 - - **Error diagnostics**: Shows validation errors directly in the editor with precise location information 11 - - **Workspace validation**: Command to validate all lexicon files in your workspace 8 + - **Real-time validation**: Validates lexicon files as you type 9 + - **JSON Schema support**: Provides autocomplete and validation using 10 + comprehensive JSON schemas 11 + - **Cross-lexicon validation**: Validates references between lexicon files in 12 + your workspace 13 + - **Error diagnostics**: Shows validation errors directly in the editor with 14 + location information 15 + - **Workspace validation**: Command to validate all lexicon files in your 16 + workspace 12 17 13 18 ## Installation 14 19 ··· 31 36 32 37 The extension can be configured in VS Code settings: 33 38 34 - - `lexiconIntelliSense.enableValidation`: Enable/disable lexicon validation (default: true) 35 - - `lexiconIntelliSense.lexiconDirectory`: Directory containing lexicon files relative to workspace root (default: "lexicons") 39 + - `lexiconIntelliSense.enableValidation`: Enable/disable lexicon validation 40 + (default: true) 41 + - `lexiconIntelliSense.lexiconDirectory`: Directory containing lexicon files 42 + relative to workspace root (default: "lexicons") 36 43 37 44 ## Usage 38 45 39 46 ### Automatic Validation 40 47 41 48 The extension automatically validates: 49 + 42 50 - JSON files in 
directories containing "lexicons" in the path 43 51 - JSON files that contain lexicon-like structure (have `id` and `defs` fields) 44 52 45 53 ### Manual Validation 46 54 47 55 Use the command palette (Ctrl+Shift+P / Cmd+Shift+P): 56 + 48 57 - `Lexicon: Validate Current File` - Validates the currently open lexicon file 49 58 - `Lexicon: Validate Workspace` - Validates all lexicon files in the workspace 50 59 51 60 ### Features in Action 52 61 53 - 1. **Structure validation**: Ensures lexicon files have required `id` and `defs` fields 62 + 1. **Structure validation**: Ensures lexicon files have required `id` and `defs` 63 + fields 54 64 2. **Type checking**: Validates that definition types are correct 55 - 3. **Reference validation**: Checks that references to other lexicons can be resolved 56 - 4. **Format validation**: Validates string formats like datetime, uri, nsid, etc. 65 + 3. **Reference validation**: Checks that references to other lexicons can be 66 + resolved 67 + 4. **Format validation**: Validates string formats like datetime, uri, nsid, 68 + etc. 57 69 58 70 ## File Structure 59 71 ··· 85 97 3. Open a workspace with lexicon files 86 98 4. Test validation and IntelliSense features 87 99 88 - ### WASM Integration 100 + ### Validation 89 101 90 - The extension uses WASM files compiled from the Rust-based `lexicon-rs` package for validation. These files are copied during build and provide the same validation logic used by the CLI tools. 102 + The extension provides comprehensive AT Protocol lexicon validation including 103 + structure validation, type checking, reference validation, and format 104 + validation. 91 105 92 106 ## Commands 93 107 94 108 - `lexiconIntelliSense.validateFile`: Validate the current lexicon file 95 - - `lexiconIntelliSense.validateWorkspace`: Validate all lexicon files in workspace 109 + - `lexiconIntelliSense.validateWorkspace`: Validate all lexicon files in 110 + workspace 96 111 97 112 ## License 98 113 99 - MIT 114 + MIT
+2 -3
packages/lexicon-intellisense/package.json
··· 1 1 { 2 2 "name": "lexicon-intellisense", 3 - "version": "0.1.0", 3 + "version": "0.1.2", 4 4 "description": "VS Code IntelliSense support for AT Protocol lexicon JSON files", 5 5 "main": "./out/extension.js", 6 6 "license": "MIT", ··· 62 62 "vscode:prepublish": "npm run build", 63 63 "compile": "tsc -p ./", 64 64 "watch": "tsc -watch -p ./", 65 - "build": "npm run copy:wasm && npm run compile", 66 - "copy:wasm": "mkdir -p wasm && cp ../lexicon-rs/pkg/* wasm/" 65 + "build": "cd ../lexicon-rs && wasm-pack build --target web --features wasm && cp pkg/* ../lexicon-intellisense/wasm/" 67 66 }, 68 67 "devDependencies": { 69 68 "@types/vscode": "^1.80.0",
+2 -2
packages/lexicon-intellisense/schemas/lexicon-schema.json
··· 37 37 "properties": { 38 38 "type": { 39 39 "type": "string", 40 - "enum": ["record", "object", "string", "integer", "boolean", "array", "ref", "union", "blob", "bytes", "cid-link", "unknown"] 40 + "enum": ["record", "query", "procedure", "subscription", "object", "string", "integer", "boolean", "array", "ref", "union", "blob", "bytes", "cid-link", "unknown", "token", "null", "params"] 41 41 }, 42 42 "description": { 43 43 "type": "string" ··· 58 58 "$ref": "#/$defs/objectDef" 59 59 } 60 60 }, 61 - "required": ["record"] 61 + "required": ["record", "key"] 62 62 } 63 63 }, 64 64 {
+160 -299
packages/lexicon-intellisense/src/language-server.ts
··· 1 - import * as vscode from 'vscode'; 2 - import * as path from 'path'; 3 - import * as fs from 'fs'; 1 + import * as vscode from "vscode"; 2 + import * as path from "path"; 3 + import * as fs from "fs"; 4 4 import { 5 5 LanguageClient, 6 6 LanguageClientOptions, 7 7 ServerOptions, 8 - TransportKind 9 - } from 'vscode-languageclient/node'; 10 - import { LexiconValidator, LexiconDoc, ValidationError } from './lexicon-validator'; 8 + TransportKind, 9 + } from "vscode-languageclient/node"; 10 + import { 11 + LexiconValidator, 12 + LexiconDoc, 13 + ValidationError, 14 + validateLexiconsAndGetErrors, 15 + } from "./lexicon-validator"; 11 16 12 17 export class LexiconLanguageServer { 13 18 private client: LanguageClient | undefined; ··· 16 21 17 22 constructor(context: vscode.ExtensionContext) { 18 23 this.context = context; 19 - this.diagnosticCollection = vscode.languages.createDiagnosticCollection('lexicon'); 24 + this.diagnosticCollection = 25 + vscode.languages.createDiagnosticCollection("lexicon"); 20 26 context.subscriptions.push(this.diagnosticCollection); 21 27 } 22 28 23 29 async start() { 24 30 // Set up document change listeners for real-time validation 25 31 const documentSelector = [ 26 - { scheme: 'file', language: 'json', pattern: '**/lexicons/**/*.json' } 32 + { scheme: "file", language: "json", pattern: "**/lexicons/**/*.json" }, 27 33 ]; 28 34 29 35 // Listen for document changes 30 - const changeListener = vscode.workspace.onDidChangeTextDocument(async (event) => { 31 - if (this.isLexiconDocument(event.document)) { 32 - await this.validateDocument(event.document); 33 - } 34 - }); 35 - 36 - // Listen for document opens 37 - const openListener = vscode.workspace.onDidOpenTextDocument(async (document) => { 38 - if (this.isLexiconDocument(document)) { 39 - await this.validateDocument(document); 36 + const changeListener = vscode.workspace.onDidChangeTextDocument( 37 + async (event) => { 38 + if (this.isLexiconDocument(event.document)) { 39 + await 
this.validateDocument(event.document); 40 + } 40 41 } 41 - }); 42 + ); 42 43 43 44 // Listen for document saves 44 - const saveListener = vscode.workspace.onDidSaveTextDocument(async (document) => { 45 - if (this.isLexiconDocument(document)) { 46 - await this.validateDocument(document); 45 + const saveListener = vscode.workspace.onDidSaveTextDocument( 46 + async (document) => { 47 + if (this.isLexiconDocument(document)) { 48 + await this.validateDocument(document); 49 + // Re-run workspace validation to check cross-references 50 + if (vscode.workspace.workspaceFolders) { 51 + for (const workspaceFolder of vscode.workspace.workspaceFolders) { 52 + await this.validateWorkspace(workspaceFolder.uri); 53 + } 54 + } 55 + } 47 56 } 48 - }); 57 + ); 49 58 50 - this.context.subscriptions.push(changeListener, openListener, saveListener); 59 + this.context.subscriptions.push(changeListener, saveListener); 51 60 52 - // Validate all open lexicon documents 53 - for (const document of vscode.workspace.textDocuments) { 54 - if (this.isLexiconDocument(document)) { 55 - await this.validateDocument(document); 61 + // Validate all lexicon documents in the workspace 62 + if (vscode.workspace.workspaceFolders) { 63 + for (const workspaceFolder of vscode.workspace.workspaceFolders) { 64 + await this.validateWorkspace(workspaceFolder.uri); 56 65 } 57 66 } 58 67 } ··· 74 83 } 75 84 76 85 async validateDocument(document: vscode.TextDocument) { 77 - console.log('validateDocument called for:', document.uri.fsPath); 78 - 79 - const config = vscode.workspace.getConfiguration('lexiconIntelliSense'); 80 - if (!config.get<boolean>('enableValidation', true)) { 81 - console.log('Validation disabled in config'); 86 + const config = vscode.workspace.getConfiguration("lexiconIntelliSense"); 87 + if (!config.get<boolean>("enableValidation", true)) { 88 + console.log("Validation disabled in config"); 82 89 return; 83 90 } 84 91 85 92 const diagnostics: vscode.Diagnostic[] = []; 86 - 
console.log('Starting validation...'); 87 93 88 94 try { 89 95 const content = document.getText(); 90 96 const lexicon = JSON.parse(content); 91 97 92 98 // Basic structure validation 93 - if (!lexicon || typeof lexicon !== 'object') { 94 - diagnostics.push(this.createDiagnostic( 95 - new vscode.Range(0, 0, 0, 0), 96 - 'Lexicon must be an object', 97 - vscode.DiagnosticSeverity.Error 98 - )); 99 + if (!lexicon || typeof lexicon !== "object") { 100 + diagnostics.push( 101 + this.createDiagnostic( 102 + new vscode.Range(0, 0, 0, 0), 103 + "Lexicon must be an object", 104 + vscode.DiagnosticSeverity.Error 105 + ) 106 + ); 99 107 } else { 100 108 // Validate required fields 101 - if (!lexicon.id || typeof lexicon.id !== 'string') { 102 - const range = this.findFieldRange(document, 'id') || this.findLineWithText(document, '"id"') || new vscode.Range(0, 0, 0, 0); 103 - diagnostics.push(this.createDiagnostic( 104 - range, 105 - 'Lexicon must have a valid "id" field', 106 - vscode.DiagnosticSeverity.Error 107 - )); 108 - } 109 - 110 - if (!lexicon.defs || typeof lexicon.defs !== 'object') { 111 - const range = this.findFieldRange(document, 'defs') || this.findLineWithText(document, '"defs"') || new vscode.Range(0, 0, 0, 0); 112 - diagnostics.push(this.createDiagnostic( 113 - range, 114 - 'Lexicon must have a "defs" object', 115 - vscode.DiagnosticSeverity.Error 116 - )); 109 + if (!lexicon.id || typeof lexicon.id !== "string") { 110 + diagnostics.push( 111 + this.createDiagnostic( 112 + new vscode.Range(0, 0, 0, 0), 113 + 'Lexicon must have a valid "id" field', 114 + vscode.DiagnosticSeverity.Error 115 + ) 116 + ); 117 117 } 118 118 119 - // Use WASM validator for deep validation 120 - if (lexicon.id && lexicon.defs) { 121 - console.log('Running WASM validation for lexicon:', lexicon.id); 122 - try { 123 - const validator = await LexiconValidator.create([lexicon as LexiconDoc]); 124 - try { 125 - validator.validateLexiconSetCompleteness(); 126 - console.log('WASM 
validation passed'); 127 - } catch (validationError) { 128 - console.log('WASM validation failed:', validationError); 129 - const errorMessage = validationError instanceof Error ? validationError.message : String(validationError); 130 - const range = this.parseErrorLocationFromMessage(document, errorMessage) || this.findFieldRange(document, 'defs') || new vscode.Range(0, 0, 0, 0); 131 - diagnostics.push(this.createDiagnostic( 132 - range, 133 - `Lexicon validation error: ${errorMessage}`, 134 - vscode.DiagnosticSeverity.Error 135 - )); 136 - } finally { 137 - validator.free(); 138 - } 139 - } catch (error) { 140 - console.log('WASM validator creation failed:', error); 141 - const errorMessage = error instanceof Error ? error.message : String(error); 142 - console.log('Parsing error location for:', errorMessage); 143 - const range = this.parseErrorLocationFromMessage(document, errorMessage) || this.findFieldRange(document, 'id') || new vscode.Range(0, 0, 0, 0); 144 - console.log('Error range found:', range); 145 - diagnostics.push(this.createDiagnostic( 146 - range, 147 - `Lexicon validation failed: ${errorMessage}`, 119 + if (!lexicon.defs || typeof lexicon.defs !== "object") { 120 + diagnostics.push( 121 + this.createDiagnostic( 122 + new vscode.Range(0, 0, 0, 0), 123 + 'Lexicon must have a "defs" object', 148 124 vscode.DiagnosticSeverity.Error 149 - )); 150 - } 151 - } else { 152 - console.log('Skipping WASM validation - missing id or defs'); 125 + ) 126 + ); 153 127 } 154 128 } 155 129 } catch (parseError) { 156 - diagnostics.push(this.createDiagnostic( 157 - new vscode.Range(0, 0, 0, 0), 158 - `Invalid JSON: ${parseError instanceof Error ? parseError.message : String(parseError)}`, 159 - vscode.DiagnosticSeverity.Error 160 - )); 130 + diagnostics.push( 131 + this.createDiagnostic( 132 + new vscode.Range(0, 0, 0, 0), 133 + `Invalid JSON: ${ 134 + parseError instanceof Error 135 + ? 
parseError.message 136 + : String(parseError) 137 + }`, 138 + vscode.DiagnosticSeverity.Error 139 + ) 140 + ); 161 141 } 162 142 163 - console.log('Setting diagnostics:', diagnostics.length, 'errors found'); 143 + console.log("Setting diagnostics:", diagnostics.length, "errors found"); 164 144 this.diagnosticCollection.set(document.uri, diagnostics); 165 145 } 166 146 167 147 async validateWorkspace(workspaceUri: vscode.Uri) { 168 - const config = vscode.workspace.getConfiguration('lexiconIntelliSense'); 169 - const lexiconDir = config.get<string>('lexiconDirectory', 'lexicons'); 148 + const config = vscode.workspace.getConfiguration("lexiconIntelliSense"); 149 + const lexiconDir = config.get<string>("lexiconDirectory", "lexicons"); 170 150 171 151 const lexiconPath = path.join(workspaceUri.fsPath, lexiconDir); 172 152 if (!fs.existsSync(lexiconPath)) { 173 - vscode.window.showWarningMessage(`Lexicon directory not found: ${lexiconPath}`); 153 + vscode.window.showWarningMessage( 154 + `Lexicon directory not found: ${lexiconPath}` 155 + ); 174 156 return; 175 157 } 176 158 177 159 const lexiconFiles = await this.findLexiconFiles(lexiconPath); 160 + 161 + // First do basic validation for each file 162 + for (const filePath of lexiconFiles) { 163 + try { 164 + const uri = vscode.Uri.file(filePath); 165 + const document = await vscode.workspace.openTextDocument(uri); 166 + await this.validateDocument(document); 167 + } catch (error) { 168 + console.warn(`Failed to validate lexicon ${filePath}:`, error); 169 + } 170 + } 171 + 172 + // Load all lexicons for cross-reference validation 178 173 const lexicons: LexiconDoc[] = []; 174 + const idToFileMap = new Map<string, string>(); 179 175 180 - // Load all lexicons 181 176 for (const filePath of lexiconFiles) { 182 177 try { 183 - const content = fs.readFileSync(filePath, 'utf8'); 178 + const content = fs.readFileSync(filePath, "utf8"); 184 179 const lexicon = JSON.parse(content); 180 + console.log(lexicon.id, "loaded from", 
filePath); 185 181 if (lexicon.id && lexicon.defs) { 186 182 lexicons.push(lexicon as LexiconDoc); 183 + idToFileMap.set(lexicon.id, filePath); 187 184 } 188 185 } catch (error) { 186 + debugger; 189 187 console.warn(`Failed to load lexicon ${filePath}:`, error); 190 188 } 191 189 } 192 190 193 - // Validate the complete set 191 + // Validate all lexicons and get errors by lexicon ID 194 192 if (lexicons.length > 0) { 195 193 try { 196 - const validator = await LexiconValidator.create(lexicons); 197 - try { 198 - validator.validateLexiconSetCompleteness(); 199 - vscode.window.showInformationMessage(`Successfully validated ${lexicons.length} lexicon files`); 200 - } catch (validationError) { 201 - vscode.window.showErrorMessage(`Set validation failed: ${validationError instanceof Error ? validationError.message : String(validationError)}`); 202 - } finally { 203 - validator.free(); 194 + // Use static validation that returns errors instead of throwing 195 + const errorsByLexiconId = await validateLexiconsAndGetErrors(lexicons); 196 + 197 + // Attribute errors to specific files 198 + for (const [lexiconId, errors] of errorsByLexiconId) { 199 + const filePath = idToFileMap.get(lexiconId); 200 + if (filePath) { 201 + const uri = vscode.Uri.file(filePath); 202 + const existingDiagnostics = 203 + this.diagnosticCollection.get(uri) || []; 204 + const diagnostics = [...existingDiagnostics]; 205 + 206 + // Add each error as a diagnostic 207 + for (const error of errors) { 208 + diagnostics.push( 209 + this.createDiagnostic( 210 + new vscode.Range(0, 0, 0, 0), 211 + error, 212 + vscode.DiagnosticSeverity.Error 213 + ) 214 + ); 215 + } 216 + 217 + this.diagnosticCollection.set(uri, diagnostics); 218 + } 219 + } 220 + 221 + const totalErrors = Array.from(errorsByLexiconId.values()).reduce( 222 + (sum, errors) => sum + errors.length, 223 + 0 224 + ); 225 + if (totalErrors === 0) { 226 + console.log( 227 + `Successfully validated ${lexicons.length} lexicon files` 228 + ); 229 + 
} else { 230 + console.log( 231 + `Validation found ${totalErrors} errors across ${errorsByLexiconId.size} lexicons` 232 + ); 204 233 } 205 234 } catch (error) { 206 - vscode.window.showErrorMessage(`WASM validation failed: ${error instanceof Error ? error.message : String(error)}`); 235 + console.warn( 236 + `WASM validation failed: ${ 237 + error instanceof Error ? error.message : String(error) 238 + }` 239 + ); 207 240 } 208 241 } 209 242 } ··· 217 250 const fullPath = path.join(dir, entry.name); 218 251 if (entry.isDirectory()) { 219 252 await readDir(fullPath); 220 - } else if (entry.name.endsWith('.json')) { 253 + } else if (entry.name.endsWith(".json")) { 221 254 files.push(fullPath); 222 255 } 223 256 } ··· 229 262 230 263 private isLexiconDocument(document: vscode.TextDocument): boolean { 231 264 const filePath = document.uri.fsPath; 232 - const isInLexiconsDir = filePath.includes('/lexicons/') || filePath.includes('\\lexicons\\'); 265 + const isInLexiconsDir = 266 + filePath.includes("/lexicons/") || filePath.includes("\\lexicons\\"); 233 267 234 - if (isInLexiconsDir && document.languageId === 'json') { 268 + if (isInLexiconsDir && document.languageId === "json") { 235 269 return true; 236 270 } 237 271 238 272 // Check if JSON content looks like a lexicon 239 273 try { 240 274 const content = JSON.parse(document.getText()); 241 - return content && typeof content === 'object' && 242 - typeof content.id === 'string' && 243 - content.defs; 275 + return ( 276 + content && 277 + typeof content === "object" && 278 + typeof content.id === "string" && 279 + content.defs 280 + ); 244 281 } catch { 245 282 return false; 246 283 } 247 284 } 248 285 249 - private findFieldRange(document: vscode.TextDocument, fieldName: string): vscode.Range | null { 250 - const text = document.getText(); 251 - const regex = new RegExp(`"${fieldName}"\\s*:`, 'g'); 252 - let match; 253 - const matches = []; 254 - 255 - // Find all matches 256 - while ((match = regex.exec(text)) !== 
null) { 257 - matches.push(match); 258 - } 259 - 260 - if (matches.length === 0) return null; 261 - 262 - // If only one match, use it 263 - if (matches.length === 1) { 264 - const startPos = document.positionAt(matches[0].index); 265 - const endPos = document.positionAt(matches[0].index + matches[0][0].length); 266 - return new vscode.Range(startPos, endPos); 267 - } 268 - 269 - // If multiple matches, try to find the one in properties context 270 - for (const match of matches) { 271 - const beforeMatch = text.substring(0, match.index); 272 - // Look for "properties" keyword before this match 273 - if (beforeMatch.includes('"properties"')) { 274 - const startPos = document.positionAt(match.index); 275 - const endPos = document.positionAt(match.index + match[0].length); 276 - return new vscode.Range(startPos, endPos); 277 - } 278 - } 279 - 280 - // Fallback to first match 281 - const startPos = document.positionAt(matches[0].index); 282 - const endPos = document.positionAt(matches[0].index + matches[0][0].length); 283 - return new vscode.Range(startPos, endPos); 284 - } 285 - 286 - private findLineWithText(document: vscode.TextDocument, searchText: string): vscode.Range | null { 287 - const text = document.getText(); 288 - const lines = text.split('\n'); 289 - 290 - for (let i = 0; i < lines.length; i++) { 291 - if (lines[i].includes(searchText)) { 292 - const start = new vscode.Position(i, 0); 293 - const end = new vscode.Position(i, lines[i].length); 294 - return new vscode.Range(start, end); 295 - } 296 - } 297 - 298 - return null; 299 - } 300 - 301 - private parseErrorLocationFromMessage(document: vscode.TextDocument, errorMessage: string): vscode.Range | null { 302 - // Try to extract field names from error messages 303 - // Common patterns: 'Property "main.title"', "'fieldName'", "at path 'defs.main.type'" 304 - 305 - // Look for Property paths like "main.title" and map them to actual JSON structure 306 - const propertyMatch = errorMessage.match(/Property 
'([^']+)'/); 307 - if (propertyMatch) { 308 - console.log('Found Property match:', propertyMatch[1]); 309 - const propertyPath = propertyMatch[1]; 310 - 311 - // For "main.title" -> just look for "title" in the properties section 312 - if (propertyPath.includes('.')) { 313 - const parts = propertyPath.split('.'); 314 - const propertyName = parts[parts.length - 1]; // Get the last part (e.g., "title") 315 - console.log('Looking for property name:', propertyName); 316 - 317 - const range = this.findPropertyInContext(document, propertyName); 318 - if (range) return range; 319 - } 320 - 321 - // Fallback to original mapping 322 - const mappedPath = this.mapPropertyPathToJsonPath(propertyPath); 323 - for (const fieldName of mappedPath) { 324 - const range = this.findFieldRange(document, fieldName) || this.findLineWithText(document, `"${fieldName}"`); 325 - if (range) return range; 326 - } 327 - } 328 - 329 - // Look for string property definition errors like "String property definition 'main.releaseDate'" 330 - const stringPropertyMatch = errorMessage.match(/String property definition '([^']+)'/); 331 - if (stringPropertyMatch) { 332 - console.log('Found String property definition match:', stringPropertyMatch[1]); 333 - const propertyPath = stringPropertyMatch[1]; 334 - 335 - if (propertyPath.includes('.')) { 336 - const parts = propertyPath.split('.'); 337 - const propertyName = parts[parts.length - 1]; // Get the last part (e.g., "releaseDate") 338 - console.log('Looking for string property name:', propertyName); 339 - 340 - const range = this.findPropertyInContext(document, propertyName); 341 - if (range) return range; 342 - } 343 - } 344 - 345 - // Look for quoted field names 346 - const quotedFieldMatch = errorMessage.match(/'([^']+)'/); 347 - if (quotedFieldMatch) { 348 - const fieldPath = quotedFieldMatch[1]; 349 - 350 - // Try to find this field in the document 351 - const range = this.findFieldRange(document, fieldPath) || 352 - this.findLineWithText(document, 
`"${fieldPath}"`); 353 - if (range) return range; 354 - } 355 - 356 - // Look for "type" errors - often in definitions 357 - if (errorMessage.toLowerCase().includes('type')) { 358 - const typeRange = this.findLineWithText(document, '"type"'); 359 - if (typeRange) return typeRange; 360 - } 361 - 362 - // Look for "defs" errors 363 - if (errorMessage.toLowerCase().includes('defs') || errorMessage.toLowerCase().includes('definition')) { 364 - const defsRange = this.findFieldRange(document, 'defs'); 365 - if (defsRange) return defsRange; 366 - } 367 - 368 - return null; 369 - } 370 - 371 - private mapPropertyPathToJsonPath(propertyPath: string): string[] { 372 - // Map paths like "main.{property}" to actual JSON structure "defs.main.record.properties.{property}" 373 - // This helps find the actual field in the JSON structure 374 - 375 - const parts = propertyPath.split('.'); 376 - 377 - if (parts.length === 2) { 378 - // "main.title" -> this is actually "defs.main.record.properties.title" in the JSON 379 - const [defName, propertyName] = parts; 380 - 381 - // Try to find the specific property first, then fallback to broader locations 382 - return [ 383 - propertyName, // Look for "title" directly 384 - 'properties', // Look for "properties" section 385 - 'record', // Look for "record" section 386 - defName, // Look for "main" 387 - 'defs' // Look for "defs" 388 - ]; 389 - } else if (parts.length === 1) { 390 - // "main" -> look for "main", then "defs" 391 - return [parts[0], 'defs']; 392 - } 393 - 394 - // For longer paths, try each part in reverse order (most specific first) 395 - return [...parts.reverse(), 'properties', 'record', 'defs']; 396 - } 397 - 398 - private findPropertyInContext(document: vscode.TextDocument, propertyName: string): vscode.Range | null { 399 - console.log('Looking for property:', propertyName); 400 - const text = document.getText(); 401 - 402 - // Find "properties" section first 403 - const propertiesMatch = 
text.match(/"properties"\s*:\s*{/); 404 - if (!propertiesMatch) { 405 - console.log('No properties section found'); 406 - return null; 407 - } 408 - 409 - const propertiesStart = propertiesMatch.index! + propertiesMatch[0].length; 410 - console.log('Properties section starts at:', propertiesStart); 411 - 412 - // Find the property within the properties section 413 - const propertiesSection = text.substring(propertiesStart); 414 - const propertyRegex = new RegExp(`"${propertyName}"\\s*:`); 415 - const propertyMatch = propertyRegex.exec(propertiesSection); 416 - 417 - if (propertyMatch) { 418 - const absoluteIndex = propertiesStart + propertyMatch.index; 419 - const startPos = document.positionAt(absoluteIndex); 420 - const endPos = document.positionAt(absoluteIndex + propertyMatch[0].length); 421 - console.log('Found property at line:', startPos.line + 1); 422 - return new vscode.Range(startPos, endPos); 423 - } 424 - 425 - console.log('Property not found in properties section'); 426 - return null; 427 - } 428 - 429 - private createDiagnostic(range: vscode.Range, message: string, severity: vscode.DiagnosticSeverity): vscode.Diagnostic { 286 + private createDiagnostic( 287 + range: vscode.Range, 288 + message: string, 289 + severity: vscode.DiagnosticSeverity 290 + ): vscode.Diagnostic { 430 291 const diagnostic = new vscode.Diagnostic(range, message, severity); 431 - diagnostic.source = 'lexicon-intellisense'; 292 + diagnostic.source = "lexicon-intellisense"; 432 293 return diagnostic; 433 294 } 434 - } 295 + }
+22
packages/lexicon-intellisense/src/lexicon-validator.ts
··· 83 83 } 84 84 85 85 /** 86 + * Get validation errors grouped by lexicon ID 87 + * Returns a Map where keys are lexicon IDs and values are arrays of error messages 88 + */ 89 + getValidationErrorsByLexicon(): Map<string, string[]> { 90 + const result = this.wasmValidator.get_validation_errors_by_lexicon(); 91 + const errorMap = JSON.parse(result); 92 + return new Map(Object.entries(errorMap)); 93 + } 94 + 95 + /** 86 96 * Clean up WASM resources 87 97 */ 88 98 free(): void { ··· 90 100 this.wasmValidator.free(); 91 101 } 92 102 } 103 + } 104 + 105 + /** 106 + * Validate lexicons and get errors without creating a validator instance 107 + * Returns a Map where keys are lexicon IDs and values are arrays of error messages 108 + */ 109 + export async function validateLexiconsAndGetErrors(lexicons: LexiconDoc[]): Promise<Map<string, string[]>> { 110 + await ensureWasmInit(); 111 + const lexiconsJson = JSON.stringify(lexicons); 112 + const result = wasmModule.validate_lexicons_and_get_errors(lexiconsJson); 113 + const errorMap = JSON.parse(result); 114 + return new Map(Object.entries(errorMap)); 93 115 } 94 116 95 117 /**
+65
packages/lexicon-intellisense/test-validation.js
// Smoke test for the WASM lexicon validator: feeds deliberately invalid
// lexicons through validateLexiconsAndGetErrors and prints the reported
// errors grouped by lexicon ID. Run with `node test-validation.js` after
// building (requires ./out to exist).
const { validateLexiconsAndGetErrors } = require('./out/lexicon-validator');

// Fixtures with intentional validation errors:
// - album.title: integer whose default (-1) violates its own minimum (0)
// - album.condition: string whose default ("bo") is not a member of its enum
// - review.album: ref to a lexicon that does not exist in the set
const testLexicons = [
  {
    lexicon: 1,
    id: "com.recordcollector.album",
    defs: {
      main: {
        type: "record",
        record: {
          type: "object",
          properties: {
            title: {
              type: "integer",
              default: -1,
              minimum: 0
            },
            condition: {
              type: "string",
              default: "bo",
              enum: ["Mint", "Near Mint", "Very Good Plus", "Very Good", "Good Plus", "Good", "Fair", "Poor"]
            }
          }
        }
      }
    }
  },
  {
    lexicon: 1,
    id: "com.recordcollector.review",
    defs: {
      main: {
        type: "record",
        record: {
          type: "object",
          properties: {
            album: {
              type: "ref",
              ref: "com.recordcollector.nonexistent#main"
            }
          }
        }
      }
    }
  }
];

async function test() {
  console.log('Testing validation...');
  const errors = await validateLexiconsAndGetErrors(testLexicons);
  console.log('\nValidation errors by lexicon:');
  for (const [lexiconId, errorList] of errors) {
    console.log(`\n${lexiconId}:`);
    errorList.forEach((err, i) => {
      console.log(`  ${i + 1}. ${err}`);
    });
  }

  if (errors.size === 0) {
    // The fixtures above are intentionally broken, so an empty result means
    // the validator missed them — surface that via a failing exit code
    // instead of silently "passing".
    console.log('\nNo errors found!');
    process.exitCode = 1;
  }
}

test().catch((err) => {
  // An unexpected crash (e.g. missing build output, WASM init failure) must
  // not exit 0 — otherwise CI treats a crashed test as a pass.
  console.error(err);
  process.exitCode = 1;
});
packages/lexicon-intellisense/wasm/lexicon_validator_bg.wasm

This is a binary file and will not be displayed.

+12
packages/lexicon-intellisense/wasm/slices_lexicon.d.ts
··· 1 1 /* tslint:disable */ 2 2 /* eslint-disable */ 3 3 export function main(): void; 4 + /** 5 + * Validate lexicons and return errors without creating a validator instance 6 + * Returns a JSON string containing a map of lexicon ID to error arrays 7 + */ 8 + export function validate_lexicons_and_get_errors(lexicons_json: string): string; 4 9 export function validate_string_format(value: string, format: string): void; 5 10 export function is_valid_nsid(nsid: string): boolean; 6 11 export class WasmLexiconValidator { ··· 19 24 * Validate that all cross-lexicon references can be resolved 20 25 */ 21 26 validate_lexicon_set_completeness(): void; 27 + /** 28 + * Get validation errors grouped by lexicon ID 29 + * Returns a JSON string containing a map of lexicon ID to error arrays 30 + */ 31 + get_validation_errors_by_lexicon(): string; 22 32 } 23 33 24 34 export type InitInput = RequestInfo | URL | Response | BufferSource | WebAssembly.Module; ··· 29 39 readonly wasmlexiconvalidator_new: (a: number, b: number) => [number, number, number]; 30 40 readonly wasmlexiconvalidator_validate_record: (a: number, b: number, c: number, d: number, e: number) => [number, number]; 31 41 readonly wasmlexiconvalidator_validate_lexicon_set_completeness: (a: number) => [number, number]; 42 + readonly wasmlexiconvalidator_get_validation_errors_by_lexicon: (a: number) => [number, number]; 43 + readonly validate_lexicons_and_get_errors: (a: number, b: number) => [number, number]; 32 44 readonly validate_string_format: (a: number, b: number, c: number, d: number) => [number, number]; 33 45 readonly is_valid_nsid: (a: number, b: number) => number; 34 46 readonly main: () => void;
+38
packages/lexicon-intellisense/wasm/slices_lexicon.js
··· 105 105 return value; 106 106 } 107 107 /** 108 + * Validate lexicons and return errors without creating a validator instance 109 + * Returns a JSON string containing a map of lexicon ID to error arrays 110 + * @param {string} lexicons_json 111 + * @returns {string} 112 + */ 113 + export function validate_lexicons_and_get_errors(lexicons_json) { 114 + let deferred2_0; 115 + let deferred2_1; 116 + try { 117 + const ptr0 = passStringToWasm0(lexicons_json, wasm.__wbindgen_malloc, wasm.__wbindgen_realloc); 118 + const len0 = WASM_VECTOR_LEN; 119 + const ret = wasm.validate_lexicons_and_get_errors(ptr0, len0); 120 + deferred2_0 = ret[0]; 121 + deferred2_1 = ret[1]; 122 + return getStringFromWasm0(ret[0], ret[1]); 123 + } finally { 124 + wasm.__wbindgen_free(deferred2_0, deferred2_1, 1); 125 + } 126 + } 127 + 128 + /** 108 129 * @param {string} value 109 130 * @param {string} format 110 131 */ ··· 186 207 const ret = wasm.wasmlexiconvalidator_validate_lexicon_set_completeness(this.__wbg_ptr); 187 208 if (ret[1]) { 188 209 throw takeFromExternrefTable0(ret[0]); 210 + } 211 + } 212 + /** 213 + * Get validation errors grouped by lexicon ID 214 + * Returns a JSON string containing a map of lexicon ID to error arrays 215 + * @returns {string} 216 + */ 217 + get_validation_errors_by_lexicon() { 218 + let deferred1_0; 219 + let deferred1_1; 220 + try { 221 + const ret = wasm.wasmlexiconvalidator_get_validation_errors_by_lexicon(this.__wbg_ptr); 222 + deferred1_0 = ret[0]; 223 + deferred1_1 = ret[1]; 224 + return getStringFromWasm0(ret[0], ret[1]); 225 + } finally { 226 + wasm.__wbindgen_free(deferred1_0, deferred1_1, 1); 189 227 } 190 228 } 191 229 }
packages/lexicon-intellisense/wasm/slices_lexicon_bg.wasm

This is a binary file and will not be displayed.

+2
packages/lexicon-intellisense/wasm/slices_lexicon_bg.wasm.d.ts
// Raw wasm-bindgen export signatures (generated — regenerate with the
// wasm-bindgen toolchain rather than hand-editing). The `(a, b)` number
// pairs on string-taking functions are pointer/length into WASM memory,
// and the `[number, number]` returns are pointer/length of the result
// string — see passStringToWasm0/getStringFromWasm0 in the JS glue.
export const wasmlexiconvalidator_new: (a: number, b: number) => [number, number, number];
export const wasmlexiconvalidator_validate_record: (a: number, b: number, c: number, d: number, e: number) => [number, number];
export const wasmlexiconvalidator_validate_lexicon_set_completeness: (a: number) => [number, number];
export const wasmlexiconvalidator_get_validation_errors_by_lexicon: (a: number) => [number, number];
export const validate_lexicons_and_get_errors: (a: number, b: number) => [number, number];
export const validate_string_format: (a: number, b: number, c: number, d: number) => [number, number];
export const is_valid_nsid: (a: number, b: number) => number;
export const main: () => void;
+25
packages/lexicon-rs/src/lib.rs
··· 65 65 .validate_lexicon_set_completeness() 66 66 .map_err(|e| JsValue::from_str(&e.to_string())) 67 67 } 68 + 69 + /// Get validation errors grouped by lexicon ID 70 + /// Returns a JSON string containing a map of lexicon ID to error arrays 71 + #[wasm_bindgen] 72 + pub fn get_validation_errors_by_lexicon(&self) -> String { 73 + let errors = self.inner.get_validation_errors_by_lexicon(); 74 + serde_json::to_string(&errors).unwrap_or_else(|_| "{}".to_string()) 75 + } 76 + } 77 + 78 + /// Validate lexicons and return errors without creating a validator instance 79 + /// Returns a JSON string containing a map of lexicon ID to error arrays 80 + #[wasm_bindgen] 81 + pub fn validate_lexicons_and_get_errors(lexicons_json: &str) -> String { 82 + let lexicons: Vec<Value> = match serde_json::from_str(lexicons_json) { 83 + Ok(l) => l, 84 + Err(e) => { 85 + let mut error_map = std::collections::HashMap::new(); 86 + error_map.insert("parse_error".to_string(), vec![format!("Failed to parse lexicons JSON: {}", e)]); 87 + return serde_json::to_string(&error_map).unwrap_or_else(|_| "{}".to_string()); 88 + } 89 + }; 90 + 91 + let errors = LexiconValidator::validate_and_get_errors(lexicons); 92 + serde_json::to_string(&errors).unwrap_or_else(|_| "{}".to_string()) 68 93 } 69 94 70 95 // Export individual validation functions for more granular use
+804 -85
packages/lexicon-rs/src/validator.rs
··· 7 7 use super::errors::ValidationError; 8 8 use super::types::{ 9 9 ArrayConstraints, BlobConstraints, BooleanConstraints, BytesConstraints, IntegerConstraints, 10 - LexiconDoc, StringConstraints, StringFormat, ValidationContext 10 + LexiconDoc, StringConstraints, StringFormat, ValidationContext, 11 11 }; 12 12 13 13 #[derive(Clone, Debug)] ··· 41 41 } 42 42 43 43 // Validate lexicon version field 44 - let lexicon_version = lexicon_value["lexicon"] 45 - .as_u64() 46 - .ok_or_else(|| ValidationError::InvalidSchema("Missing or invalid lexicon version field".to_string()))?; 44 + let lexicon_version = lexicon_value["lexicon"].as_u64().ok_or_else(|| { 45 + ValidationError::InvalidSchema( 46 + "Missing or invalid lexicon version field".to_string(), 47 + ) 48 + })?; 47 49 48 50 if lexicon_version != 1 { 49 51 return Err(ValidationError::InvalidSchema(format!( ··· 73 75 ))); 74 76 } 75 77 76 - let lexicon_doc = LexiconDoc { id: id.clone(), defs }; 78 + let lexicon_doc = LexiconDoc { 79 + id: id.clone(), 80 + defs, 81 + }; 77 82 78 83 // Validate the lexicon definitions immediately upon loading 79 84 Self::validate_lexicon_definitions_static(&lexicon_doc.defs)?; ··· 86 91 }) 87 92 } 88 93 94 + /// Validate lexicons and return errors grouped by lexicon ID 95 + /// This is a static method that doesn't throw on validation errors 96 + /// Includes both structural validation and cross-reference checking 97 + pub fn validate_and_get_errors(lexicons: Vec<Value>) -> HashMap<String, Vec<String>> { 98 + let mut errors_by_lexicon: HashMap<String, Vec<String>> = HashMap::new(); 99 + let mut lexicon_map = HashMap::new(); 100 + 101 + // First pass: validate structure and build lexicon map 102 + for lexicon_value in lexicons { 103 + let mut lexicon_errors = Vec::new(); 104 + 105 + // Try to extract lexicon ID for error reporting 106 + let lexicon_id = lexicon_value["id"] 107 + .as_str() 108 + .unwrap_or("unknown") 109 + .to_string(); 110 + 111 + // Validate that only allowed top-level 
fields are present 112 + if let Some(obj) = lexicon_value.as_object() { 113 + let allowed_fields = ["lexicon", "id", "defs", "revision", "description"]; 114 + let mut unknown_fields = Vec::new(); 115 + 116 + for key in obj.keys() { 117 + if !allowed_fields.contains(&key.as_str()) { 118 + unknown_fields.push(key.clone()); 119 + } 120 + } 121 + 122 + if !unknown_fields.is_empty() { 123 + lexicon_errors.push(format!( 124 + "Unrecognized key(s) in lexicon document: {}", 125 + unknown_fields.join(", ") 126 + )); 127 + errors_by_lexicon.insert(lexicon_id, lexicon_errors); 128 + continue; 129 + } 130 + } 131 + 132 + // Validate lexicon version field 133 + let lexicon_version = lexicon_value["lexicon"].as_u64(); 134 + if lexicon_version.is_none() { 135 + lexicon_errors.push("Missing or invalid lexicon version field".to_string()); 136 + errors_by_lexicon.insert(lexicon_id, lexicon_errors); 137 + continue; 138 + } 139 + 140 + if lexicon_version != Some(1) { 141 + lexicon_errors.push(format!( 142 + "Unsupported lexicon version: {}. Only version 1 is supported.", 143 + lexicon_version.unwrap() 144 + )); 145 + errors_by_lexicon.insert(lexicon_id, lexicon_errors); 146 + continue; 147 + } 148 + 149 + // Validate lexicon ID format (NSID) 150 + if !crate::is_valid_nsid(&lexicon_id) { 151 + lexicon_errors.push(format!( 152 + "Invalid lexicon ID format: '{}'. 
Must be a valid NSID.", 153 + lexicon_id 154 + )); 155 + errors_by_lexicon.insert(lexicon_id, lexicon_errors); 156 + continue; 157 + } 158 + 159 + let defs = lexicon_value["defs"].clone(); 160 + if defs.is_null() { 161 + lexicon_errors.push(format!("Missing defs in lexicon {}", lexicon_id)); 162 + errors_by_lexicon.insert(lexicon_id, lexicon_errors); 163 + continue; 164 + } 165 + 166 + // Validate the lexicon definitions 167 + if let Err(e) = Self::validate_lexicon_definitions_static(&defs) { 168 + match e { 169 + ValidationError::MultipleErrors { errors } => { 170 + lexicon_errors.extend(errors); 171 + } 172 + _ => { 173 + lexicon_errors.push(e.to_string()); 174 + } 175 + } 176 + errors_by_lexicon.insert(lexicon_id.clone(), lexicon_errors); 177 + } 178 + 179 + // Add to lexicon map for cross-reference checking 180 + lexicon_map.insert(lexicon_id.clone(), LexiconDoc { 181 + id: lexicon_id, 182 + defs, 183 + }); 184 + } 185 + 186 + // Second pass: check cross-references 187 + let validator = Self { 188 + lexicons: lexicon_map, 189 + }; 190 + 191 + for (lexicon_id, lexicon) in &validator.lexicons { 192 + let mut missing_refs = Vec::new(); 193 + if let Err(e) = validator.collect_missing_references(lexicon_id, &lexicon.defs, &mut missing_refs) { 194 + errors_by_lexicon.entry(lexicon_id.clone()) 195 + .or_insert_with(Vec::new) 196 + .push(e); 197 + } else { 198 + for missing_ref in missing_refs { 199 + errors_by_lexicon.entry(lexicon_id.clone()) 200 + .or_insert_with(Vec::new) 201 + .push(missing_ref); 202 + } 203 + } 204 + } 205 + 206 + errors_by_lexicon 207 + } 208 + 89 209 /// Validate a record against its collection's lexicon 90 210 pub fn validate_record(&self, collection: &str, record: &Value) -> Result<(), ValidationError> { 91 211 // Parse collection string which might have fragment (#object, #main, etc) ··· 186 306 match def_type { 187 307 "record" => { 188 308 if let Err(e) = Self::validate_record_definition_static(def_name, def_value) { 189 - 
errors.push(e.to_string()); 309 + match e { 310 + ValidationError::MultipleErrors { errors: errs } => errors.extend(errs), 311 + _ => errors.push(e.to_string()), 312 + } 313 + } 314 + } 315 + "query" => { 316 + if let Err(e) = Self::validate_query_definition_static(def_name, def_value) { 317 + match e { 318 + ValidationError::MultipleErrors { errors: errs } => errors.extend(errs), 319 + _ => errors.push(e.to_string()), 320 + } 321 + } 322 + } 323 + "procedure" => { 324 + if let Err(e) = Self::validate_procedure_definition_static(def_name, def_value) { 325 + match e { 326 + ValidationError::MultipleErrors { errors: errs } => errors.extend(errs), 327 + _ => errors.push(e.to_string()), 328 + } 329 + } 330 + } 331 + "subscription" => { 332 + if let Err(e) = Self::validate_subscription_definition_static(def_name, def_value) { 333 + match e { 334 + ValidationError::MultipleErrors { errors: errs } => errors.extend(errs), 335 + _ => errors.push(e.to_string()), 336 + } 190 337 } 191 338 } 192 339 "object" => { 193 340 if let Err(e) = Self::validate_object_definition_static(def_name, def_value) { 194 - errors.push(e.to_string()); 341 + match e { 342 + ValidationError::MultipleErrors { errors: errs } => errors.extend(errs), 343 + _ => errors.push(e.to_string()), 344 + } 195 345 } 196 346 } 197 347 "string" | "integer" | "boolean" | "array" | "union" | "ref" | "blob" | "bytes" 198 348 | "cid-link" | "unknown" | "token" | "null" => { 199 349 // Basic types are valid, could add more specific validation here 200 - if let Err(e) = Self::validate_type_definition_static(def_name, def_value, def_type) { 201 - errors.push(e.to_string()); 350 + if let Err(e) = 351 + Self::validate_type_definition_static(def_name, def_value, def_type) 352 + { 353 + match e { 354 + ValidationError::MultipleErrors { errors: errs } => errors.extend(errs), 355 + _ => errors.push(e.to_string()), 356 + } 202 357 } 203 358 } 204 359 _ => { 205 360 errors.push(format!( 206 - "Definition '{}' has unknown type 
'{}'. Valid types are: record, object, string, integer, boolean, array, union, ref, blob, bytes, cid-link, unknown, token, null", 361 + "Definition '{}' has unknown type '{}'. Valid types are: record, query, procedure, subscription, object, string, integer, boolean, array, union, ref, blob, bytes, cid-link, unknown, token, null", 207 362 def_name, def_type 208 363 )); 209 364 } ··· 313 468 let mut errors = Vec::new(); 314 469 315 470 // Validate allowed fields for record definition 316 - let allowed_record_fields = [ 317 - "type", "record", "description", "key" 318 - ]; 319 - Self::validate_allowed_fields(def_name, def_value, &allowed_record_fields, "Record", &mut errors); 471 + let allowed_record_fields = ["type", "record", "description", "key"]; 472 + Self::validate_allowed_fields( 473 + def_name, 474 + def_value, 475 + &allowed_record_fields, 476 + "Record", 477 + &mut errors, 478 + ); 479 + 480 + // Record definitions must have a "key" field 481 + if let Some(key) = def_value.get("key") { 482 + if let Some(key_str) = key.as_str() { 483 + if !matches!(key_str, "literal:self" | "tid" | "any") { 484 + errors.push(format!( 485 + "Record definition '{}' key must be 'literal:self', 'tid', or 'any', got '{}'", 486 + def_name, key_str 487 + )); 488 + } 489 + } else { 490 + errors.push(format!( 491 + "Record definition '{}' key must be a string", 492 + def_name 493 + )); 494 + } 495 + } else { 496 + errors.push(format!( 497 + "Record definition '{}' missing required 'key' field", 498 + def_name 499 + )); 500 + } 320 501 321 502 // Record definitions should have a "record" field 322 503 let record_def = match def_value.get("record") { ··· 338 519 } 339 520 340 521 // Validate allowed fields for the nested record object 341 - let allowed_object_fields = [ 342 - "type", "properties", "required", "nullable", "description" 343 - ]; 344 - Self::validate_allowed_fields(&format!("{}.record", def_name), record_def, &allowed_object_fields, "Object", &mut errors); 522 + let 
allowed_object_fields = ["type", "properties", "required", "nullable", "description"]; 523 + Self::validate_allowed_fields( 524 + &format!("{}.record", def_name), 525 + record_def, 526 + &allowed_object_fields, 527 + "Object", 528 + &mut errors, 529 + ); 345 530 346 531 // Validate properties if they exist 347 532 if let Some(properties) = record_def.get("properties") { ··· 353 538 } else { 354 539 // Validate each property's type 355 540 for (prop_name, prop_def) in properties.as_object().unwrap() { 356 - Self::validate_property_definition_static(def_name, prop_name, prop_def, &mut errors); 541 + Self::validate_property_definition_static( 542 + def_name, 543 + prop_name, 544 + prop_def, 545 + &mut errors, 546 + ); 357 547 } 358 548 } 359 549 } ··· 389 579 } 390 580 } 391 581 582 + /// Static version of query definition validation 583 + fn validate_query_definition_static( 584 + def_name: &str, 585 + def_value: &Value, 586 + ) -> Result<(), ValidationError> { 587 + let mut errors = Vec::new(); 588 + 589 + // Validate allowed fields for query definition 590 + let allowed_fields = ["type", "description", "parameters", "output", "errors"]; 591 + Self::validate_allowed_fields( 592 + def_name, 593 + def_value, 594 + &allowed_fields, 595 + "Query", 596 + &mut errors, 597 + ); 598 + 599 + // Validate parameters if present 600 + if let Some(params) = def_value.get("parameters") { 601 + if let Err(e) = Self::validate_params_definition_static(def_name, params) { 602 + match e { 603 + ValidationError::MultipleErrors { errors: errs } => errors.extend(errs), 604 + _ => errors.push(e.to_string()), 605 + } 606 + } 607 + } 608 + 609 + // Validate output if present 610 + if let Some(output) = def_value.get("output") { 611 + if let Err(e) = Self::validate_output_definition_static(def_name, output) { 612 + match e { 613 + ValidationError::MultipleErrors { errors: errs } => errors.extend(errs), 614 + _ => errors.push(e.to_string()), 615 + } 616 + } 617 + } 618 + 619 + // Validate 
errors if present 620 + if let Some(errors_field) = def_value.get("errors") { 621 + if let Err(e) = Self::validate_errors_definition_static(def_name, errors_field) { 622 + match e { 623 + ValidationError::MultipleErrors { errors: errs } => errors.extend(errs), 624 + _ => errors.push(e.to_string()), 625 + } 626 + } 627 + } 628 + 629 + if errors.is_empty() { 630 + Ok(()) 631 + } else { 632 + Err(ValidationError::MultipleErrors { errors }) 633 + } 634 + } 635 + 636 + /// Static version of procedure definition validation 637 + fn validate_procedure_definition_static( 638 + def_name: &str, 639 + def_value: &Value, 640 + ) -> Result<(), ValidationError> { 641 + let mut errors = Vec::new(); 642 + 643 + // Validate allowed fields for procedure definition 644 + let allowed_fields = ["type", "description", "parameters", "input", "output", "errors"]; 645 + Self::validate_allowed_fields( 646 + def_name, 647 + def_value, 648 + &allowed_fields, 649 + "Procedure", 650 + &mut errors, 651 + ); 652 + 653 + // Validate parameters if present 654 + if let Some(params) = def_value.get("parameters") { 655 + if let Err(e) = Self::validate_params_definition_static(def_name, params) { 656 + match e { 657 + ValidationError::MultipleErrors { errors: errs } => errors.extend(errs), 658 + _ => errors.push(e.to_string()), 659 + } 660 + } 661 + } 662 + 663 + // Validate input if present 664 + if let Some(input) = def_value.get("input") { 665 + if let Err(e) = Self::validate_output_definition_static(def_name, input) { 666 + match e { 667 + ValidationError::MultipleErrors { errors: errs } => errors.extend(errs), 668 + _ => errors.push(e.to_string()), 669 + } 670 + } 671 + } 672 + 673 + // Validate output if present 674 + if let Some(output) = def_value.get("output") { 675 + if let Err(e) = Self::validate_output_definition_static(def_name, output) { 676 + match e { 677 + ValidationError::MultipleErrors { errors: errs } => errors.extend(errs), 678 + _ => errors.push(e.to_string()), 679 + } 680 + } 681 
+ } 682 + 683 + // Validate errors if present 684 + if let Some(errors_field) = def_value.get("errors") { 685 + if let Err(e) = Self::validate_errors_definition_static(def_name, errors_field) { 686 + match e { 687 + ValidationError::MultipleErrors { errors: errs } => errors.extend(errs), 688 + _ => errors.push(e.to_string()), 689 + } 690 + } 691 + } 692 + 693 + if errors.is_empty() { 694 + Ok(()) 695 + } else { 696 + Err(ValidationError::MultipleErrors { errors }) 697 + } 698 + } 699 + 700 + /// Static version of subscription definition validation 701 + fn validate_subscription_definition_static( 702 + def_name: &str, 703 + def_value: &Value, 704 + ) -> Result<(), ValidationError> { 705 + let mut errors = Vec::new(); 706 + 707 + // Validate allowed fields for subscription definition 708 + let allowed_fields = ["type", "description", "parameters", "message", "errors"]; 709 + Self::validate_allowed_fields( 710 + def_name, 711 + def_value, 712 + &allowed_fields, 713 + "Subscription", 714 + &mut errors, 715 + ); 716 + 717 + // Validate parameters if present 718 + if let Some(params) = def_value.get("parameters") { 719 + if let Err(e) = Self::validate_params_definition_static(def_name, params) { 720 + match e { 721 + ValidationError::MultipleErrors { errors: errs } => errors.extend(errs), 722 + _ => errors.push(e.to_string()), 723 + } 724 + } 725 + } 726 + 727 + // Validate message if present 728 + if let Some(message) = def_value.get("message") { 729 + // Message must have a schema field 730 + if let Some(schema) = message.get("schema") { 731 + // Schema must be a union of refs 732 + if schema.get("type").and_then(|t| t.as_str()) != Some("union") { 733 + errors.push(format!( 734 + "Subscription '{}' message schema must be a union type", 735 + def_name 736 + )); 737 + } 738 + } else { 739 + errors.push(format!( 740 + "Subscription '{}' message must have a schema field", 741 + def_name 742 + )); 743 + } 744 + } 745 + 746 + // Validate errors if present 747 + if let 
Some(errors_field) = def_value.get("errors") { 748 + if let Err(e) = Self::validate_errors_definition_static(def_name, errors_field) { 749 + match e { 750 + ValidationError::MultipleErrors { errors: errs } => errors.extend(errs), 751 + _ => errors.push(e.to_string()), 752 + } 753 + } 754 + } 755 + 756 + if errors.is_empty() { 757 + Ok(()) 758 + } else { 759 + Err(ValidationError::MultipleErrors { errors }) 760 + } 761 + } 762 + 763 + /// Validate params definition 764 + fn validate_params_definition_static( 765 + def_name: &str, 766 + params: &Value, 767 + ) -> Result<(), ValidationError> { 768 + let mut errors = Vec::new(); 769 + 770 + // Params must have type "params" 771 + if params.get("type").and_then(|t| t.as_str()) != Some("params") { 772 + errors.push(format!( 773 + "Definition '{}' parameters must have type 'params'", 774 + def_name 775 + )); 776 + } 777 + 778 + // Validate properties if present 779 + if let Some(properties) = params.get("properties") { 780 + if !properties.is_object() { 781 + errors.push(format!( 782 + "Definition '{}' parameters properties must be an object", 783 + def_name 784 + )); 785 + } else { 786 + // Validate each property - must be boolean, integer, string, unknown, or array of these 787 + for (prop_name, prop_def) in properties.as_object().unwrap() { 788 + if let Some(prop_type) = prop_def.get("type").and_then(|t| t.as_str()) { 789 + match prop_type { 790 + "boolean" | "integer" | "string" | "unknown" => {}, 791 + "array" => { 792 + // Check items type 793 + if let Some(items) = prop_def.get("items") { 794 + if let Some(items_type) = items.get("type").and_then(|t| t.as_str()) { 795 + if !matches!(items_type, "boolean" | "integer" | "string" | "unknown") { 796 + errors.push(format!( 797 + "Parameters property '{}' array items must be boolean, integer, string, or unknown", 798 + prop_name 799 + )); 800 + } 801 + } 802 + } 803 + } 804 + _ => { 805 + errors.push(format!( 806 + "Parameters property '{}' has invalid type '{}'. 
Must be boolean, integer, string, unknown, or array", 807 + prop_name, prop_type 808 + )); 809 + } 810 + } 811 + } 812 + } 813 + } 814 + } 815 + 816 + if errors.is_empty() { 817 + Ok(()) 818 + } else { 819 + Err(ValidationError::MultipleErrors { errors }) 820 + } 821 + } 822 + 823 + /// Validate output/input definition 824 + fn validate_output_definition_static( 825 + def_name: &str, 826 + output: &Value, 827 + ) -> Result<(), ValidationError> { 828 + let mut errors = Vec::new(); 829 + 830 + // Must have encoding field 831 + if output.get("encoding").is_none() { 832 + errors.push(format!( 833 + "Definition '{}' output must have 'encoding' field", 834 + def_name 835 + )); 836 + } else if !output.get("encoding").unwrap().is_string() { 837 + errors.push(format!( 838 + "Definition '{}' output encoding must be a string", 839 + def_name 840 + )); 841 + } 842 + 843 + // Schema is optional, but if present must be object, ref, or union 844 + if let Some(schema) = output.get("schema") { 845 + if let Some(schema_type) = schema.get("type").and_then(|t| t.as_str()) { 846 + if !matches!(schema_type, "object" | "ref" | "union") { 847 + errors.push(format!( 848 + "Definition '{}' output schema must be object, ref, or union type", 849 + def_name 850 + )); 851 + } else { 852 + // Recursively validate the schema definition 853 + if let Err(e) = Self::validate_type_definition_static( 854 + &format!("{}.output.schema", def_name), 855 + schema, 856 + schema_type, 857 + ) { 858 + match e { 859 + ValidationError::MultipleErrors { errors: errs } => { 860 + errors.extend(errs); 861 + } 862 + _ => { 863 + errors.push(e.to_string()); 864 + } 865 + } 866 + } 867 + } 868 + } 869 + } 870 + 871 + if errors.is_empty() { 872 + Ok(()) 873 + } else { 874 + Err(ValidationError::MultipleErrors { errors }) 875 + } 876 + } 877 + 878 + /// Validate errors definition 879 + fn validate_errors_definition_static( 880 + def_name: &str, 881 + errors_field: &Value, 882 + ) -> Result<(), ValidationError> { 883 + 
let mut errors = Vec::new(); 884 + 885 + if !errors_field.is_array() { 886 + errors.push(format!( 887 + "Definition '{}' errors must be an array", 888 + def_name 889 + )); 890 + } else { 891 + for (i, error_def) in errors_field.as_array().unwrap().iter().enumerate() { 892 + // Each error must have a name 893 + if error_def.get("name").is_none() { 894 + errors.push(format!( 895 + "Definition '{}' error {} missing 'name' field", 896 + def_name, i 897 + )); 898 + } else if !error_def.get("name").unwrap().is_string() { 899 + errors.push(format!( 900 + "Definition '{}' error {} name must be a string", 901 + def_name, i 902 + )); 903 + } else { 904 + let name = error_def.get("name").unwrap().as_str().unwrap(); 905 + if name.contains(char::is_whitespace) { 906 + errors.push(format!( 907 + "Definition '{}' error name '{}' must not contain whitespace", 908 + def_name, name 909 + )); 910 + } 911 + } 912 + 913 + // Description is optional but must be string if present 914 + if let Some(desc) = error_def.get("description") { 915 + if !desc.is_string() { 916 + errors.push(format!( 917 + "Definition '{}' error {} description must be a string", 918 + def_name, i 919 + )); 920 + } 921 + } 922 + } 923 + } 924 + 925 + if errors.is_empty() { 926 + Ok(()) 927 + } else { 928 + Err(ValidationError::MultipleErrors { errors }) 929 + } 930 + } 931 + 392 932 /// Static version of object definition validation 393 933 fn validate_object_definition_static( 394 934 def_name: &str, ··· 397 937 let mut errors = Vec::new(); 398 938 399 939 // Validate allowed fields 400 - let allowed_object_fields = [ 401 - "type", "properties", "required", "nullable", "description" 402 - ]; 403 - Self::validate_allowed_fields(def_name, def_value, &allowed_object_fields, "Object", &mut errors); 940 + let allowed_object_fields = ["type", "properties", "required", "nullable", "description"]; 941 + Self::validate_allowed_fields( 942 + def_name, 943 + def_value, 944 + &allowed_object_fields, 945 + "Object", 946 + &mut 
errors, 947 + ); 404 948 405 949 // Object definitions should have properties 406 950 if let Some(properties) = def_value.get("properties") { ··· 412 956 } else { 413 957 // Validate each property's type 414 958 for (prop_name, prop_def) in properties.as_object().unwrap() { 415 - Self::validate_property_definition_static(def_name, prop_name, prop_def, &mut errors); 959 + Self::validate_property_definition_static( 960 + def_name, 961 + prop_name, 962 + prop_def, 963 + &mut errors, 964 + ); 416 965 } 417 966 } 418 967 } ··· 498 1047 // Apply AT Protocol structural validation rules 499 1048 match type_name { 500 1049 "object" => { 501 - let allowed_object_fields = [ 502 - "type", "properties", "required", "nullable", "description" 503 - ]; 504 - Self::validate_allowed_fields(def_name, def_value, &allowed_object_fields, "Object", &mut errors); 1050 + let allowed_object_fields = 1051 + ["type", "properties", "required", "nullable", "description"]; 1052 + Self::validate_allowed_fields( 1053 + def_name, 1054 + def_value, 1055 + &allowed_object_fields, 1056 + "Object", 1057 + &mut errors, 1058 + ); 505 1059 506 1060 // Object type should have properties field 507 1061 if let Some(properties) = def_value.get("properties") { ··· 513 1067 } else { 514 1068 // Validate each property's type 515 1069 for (prop_name, prop_def) in properties.as_object().unwrap() { 516 - Self::validate_property_definition_static(def_name, prop_name, prop_def, &mut errors); 1070 + Self::validate_property_definition_static( 1071 + def_name, 1072 + prop_name, 1073 + prop_def, 1074 + &mut errors, 1075 + ); 517 1076 } 518 1077 } 519 1078 } ··· 527 1086 )); 528 1087 } else { 529 1088 // Check that all required fields exist in properties 530 - if let Some(properties) = def_value.get("properties").and_then(|p| p.as_object()) { 1089 + if let Some(properties) = 1090 + def_value.get("properties").and_then(|p| p.as_object()) 1091 + { 531 1092 for req_field in required.as_array().unwrap() { 532 1093 if let 
Some(field_name) = req_field.as_str() { 533 1094 if !properties.contains_key(field_name) { ··· 551 1112 )); 552 1113 } 553 1114 } 554 - }, 1115 + } 555 1116 "array" => { 556 1117 // Array type must have items field 557 1118 if def_value.get("items").is_none() { ··· 562 1123 } else { 563 1124 let items = &def_value["items"]; 564 1125 // Validate that items has a type 565 - if !items.get("type").and_then(|t| t.as_str()).is_some() && 566 - !items.get("ref").and_then(|r| r.as_str()).is_some() { 1126 + if !items.get("type").and_then(|t| t.as_str()).is_some() 1127 + && !items.get("ref").and_then(|r| r.as_str()).is_some() 1128 + { 567 1129 errors.push(format!( 568 1130 "Array definition '{}' items must have 'type' or 'ref' field", 569 1131 def_name 570 1132 )); 571 1133 } 572 1134 } 573 - }, 1135 + } 574 1136 "union" => { 575 1137 // Union type must have refs field 576 1138 if def_value.get("refs").is_none() { ··· 592 1154 )); 593 1155 } 594 1156 } 595 - }, 1157 + } 596 1158 "ref" => { 597 1159 // Reference type must have ref field 598 1160 if def_value.get("ref").is_none() { ··· 607 1169 "Reference definition '{}' ref field must be a string", 608 1170 def_name 609 1171 )); 1172 + } else { 1173 + // Validate ref value format 1174 + let ref_str = ref_val.as_str().unwrap(); 1175 + if !ref_str.starts_with('#') && !ref_str.contains('.') { 1176 + errors.push(format!( 1177 + "Reference definition '{}' ref field '{}' must be either a local reference (starting with #) or a lexicon reference (containing .)", 1178 + def_name, ref_str 1179 + )); 1180 + } 610 1181 } 611 1182 } 612 - }, 1183 + } 613 1184 "blob" => { 614 1185 // Blob type constraints are validated at runtime, not at definition time 615 1186 // But we can validate accept and maxSize field types if present ··· 629 1200 )); 630 1201 } 631 1202 } 632 - }, 1203 + } 633 1204 "string" => { 634 1205 // Validate string constraint field types 635 1206 if let Some(min_len) = def_value.get("minLength") { ··· 665 1236 } else if 
StringFormat::from_str(format.as_str().unwrap()).is_none() { 666 1237 errors.push(format!( 667 1238 "String definition '{}' has unknown format '{}'", 668 - def_name, format.as_str().unwrap() 1239 + def_name, 1240 + format.as_str().unwrap() 669 1241 )); 670 1242 } 671 1243 } 672 - }, 1244 + } 673 1245 "integer" => { 674 1246 // Validate integer constraint field types 675 1247 if let Some(min) = def_value.get("minimum") { ··· 696 1268 )); 697 1269 } 698 1270 } 699 - }, 1271 + } 700 1272 "bytes" => { 701 1273 // Validate bytes constraint field types 702 1274 if let Some(min_len) = def_value.get("minLength") { ··· 715 1287 )); 716 1288 } 717 1289 } 718 - }, 1290 + } 719 1291 // token, null, unknown, cid-link, boolean don't require special structural validation 720 1292 _ => {} 721 1293 } ··· 739 1311 match prop_type { 740 1312 "string" => { 741 1313 let allowed_fields = [ 742 - "type", "description", "default", "const", "enum", "format", 743 - "minLength", "maxLength", "minGraphemes", "maxGraphemes", "knownValues" 1314 + "type", 1315 + "description", 1316 + "default", 1317 + "const", 1318 + "enum", 1319 + "format", 1320 + "minLength", 1321 + "maxLength", 1322 + "minGraphemes", 1323 + "maxGraphemes", 1324 + "knownValues", 744 1325 ]; 745 1326 Self::validate_allowed_fields( 746 1327 &format!("{}.{}", parent_name, prop_name), 747 1328 prop_def, 748 1329 &allowed_fields, 749 1330 "String property", 750 - errors 1331 + errors, 751 1332 ); 752 1333 753 1334 // Validate default value against constraints 754 1335 if let Some(default_val) = prop_def.get("default").and_then(|v| v.as_str()) { 755 - Self::validate_string_default(parent_name, prop_name, default_val, prop_def, errors); 1336 + Self::validate_string_default( 1337 + parent_name, 1338 + prop_name, 1339 + default_val, 1340 + prop_def, 1341 + errors, 1342 + ); 756 1343 } 757 1344 } 758 1345 "integer" => { 759 1346 let allowed_fields = [ 760 - "type", "description", "default", "const", "enum", 761 - "minimum", "maximum" 
1347 + "type", 1348 + "description", 1349 + "default", 1350 + "const", 1351 + "enum", 1352 + "minimum", 1353 + "maximum", 762 1354 ]; 763 1355 Self::validate_allowed_fields( 764 1356 &format!("{}.{}", parent_name, prop_name), 765 1357 prop_def, 766 1358 &allowed_fields, 767 1359 "Integer property", 768 - errors 1360 + errors, 769 1361 ); 770 1362 771 1363 // Validate default value against constraints 772 1364 if let Some(default_val) = prop_def.get("default").and_then(|v| v.as_i64()) { 773 - Self::validate_integer_default(parent_name, prop_name, default_val, prop_def, errors); 1365 + Self::validate_integer_default( 1366 + parent_name, 1367 + prop_name, 1368 + default_val, 1369 + prop_def, 1370 + errors, 1371 + ); 774 1372 } 775 1373 } 776 1374 "boolean" => { ··· 780 1378 prop_def, 781 1379 &allowed_fields, 782 1380 "Boolean property", 783 - errors 1381 + errors, 784 1382 ); 785 1383 786 1384 // Validate default value against constraints 787 1385 if let Some(default_val) = prop_def.get("default").and_then(|v| v.as_bool()) { 788 - Self::validate_boolean_default(parent_name, prop_name, default_val, prop_def, errors); 1386 + Self::validate_boolean_default( 1387 + parent_name, 1388 + prop_name, 1389 + default_val, 1390 + prop_def, 1391 + errors, 1392 + ); 789 1393 } 790 1394 } 791 1395 "array" => { 792 - let allowed_fields = [ 793 - "type", "description", "items", "minLength", "maxLength" 794 - ]; 1396 + let allowed_fields = ["type", "description", "items", "minLength", "maxLength"]; 795 1397 Self::validate_allowed_fields( 796 1398 &format!("{}.{}", parent_name, prop_name), 797 1399 prop_def, 798 1400 &allowed_fields, 799 1401 "Array property", 800 - errors 1402 + errors, 801 1403 ); 802 1404 803 1405 // Validate items if present 804 1406 if let Some(items) = prop_def.get("items") { 805 1407 let items_path = format!("{}.items", prop_name); 806 - Self::validate_property_definition_static(parent_name, &items_path, items, errors); 1408 + 
Self::validate_property_definition_static( 1409 + parent_name, 1410 + &items_path, 1411 + items, 1412 + errors, 1413 + ); 807 1414 } 808 1415 } 809 1416 "object" => { 810 - let allowed_fields = [ 811 - "type", "description", "properties", "required", "nullable" 812 - ]; 1417 + let allowed_fields = 1418 + ["type", "description", "properties", "required", "nullable"]; 813 1419 Self::validate_allowed_fields( 814 1420 &format!("{}.{}", parent_name, prop_name), 815 1421 prop_def, 816 1422 &allowed_fields, 817 1423 "Object property", 818 - errors 1424 + errors, 819 1425 ); 820 1426 821 1427 // Recursively validate nested properties ··· 828 1434 } else { 829 1435 for (nested_name, nested_def) in nested_props.as_object().unwrap() { 830 1436 let nested_path = format!("{}.{}", prop_name, nested_name); 831 - Self::validate_property_definition_static(parent_name, &nested_path, nested_def, errors); 1437 + Self::validate_property_definition_static( 1438 + parent_name, 1439 + &nested_path, 1440 + nested_def, 1441 + errors, 1442 + ); 832 1443 } 833 1444 } 834 1445 } ··· 840 1451 prop_def, 841 1452 &allowed_fields, 842 1453 "Ref property", 843 - errors 1454 + errors, 844 1455 ); 845 1456 } 846 1457 "union" => { ··· 850 1461 prop_def, 851 1462 &allowed_fields, 852 1463 "Union property", 853 - errors 1464 + errors, 854 1465 ); 855 1466 } 856 1467 "blob" => { ··· 860 1471 prop_def, 861 1472 &allowed_fields, 862 1473 "Blob property", 863 - errors 1474 + errors, 864 1475 ); 865 1476 } 866 1477 "bytes" => { ··· 870 1481 prop_def, 871 1482 &allowed_fields, 872 1483 "Bytes property", 873 - errors 1484 + errors, 874 1485 ); 875 1486 } 876 1487 "record" | "cid-link" | "unknown" | "token" | "null" => { ··· 880 1491 prop_def, 881 1492 &allowed_fields, 882 1493 &format!("{} property", prop_type), 883 - errors 1494 + errors, 884 1495 ); 885 1496 } 886 1497 _ => { ··· 981 1592 982 1593 for (nested_name, nested_def) in nested_props.as_object().unwrap() { 983 1594 let nested_path = format!("{}.{}", 
prop_name, nested_name); 984 - self.validate_property_definition(parent_name, &nested_path, nested_def)?; 1595 + self.validate_property_definition( 1596 + parent_name, 1597 + &nested_path, 1598 + nested_def, 1599 + )?; 985 1600 } 986 1601 } 987 1602 } ··· 1014 1629 } 1015 1630 } 1016 1631 1632 + /// Get validation errors grouped by lexicon ID 1633 + /// Returns a HashMap where keys are lexicon IDs and values are vectors of error messages 1634 + pub fn get_validation_errors_by_lexicon(&self) -> HashMap<String, Vec<String>> { 1635 + let mut errors_by_lexicon = HashMap::new(); 1636 + 1637 + // Check each lexicon for validation errors 1638 + for (lexicon_id, lexicon) in &self.lexicons { 1639 + let mut lexicon_errors = Vec::new(); 1640 + 1641 + // Collect missing references for this lexicon 1642 + let mut missing_refs = Vec::new(); 1643 + if let Err(e) = 1644 + self.collect_missing_references(lexicon_id, &lexicon.defs, &mut missing_refs) 1645 + { 1646 + lexicon_errors.push(e); 1647 + } else { 1648 + // Add individual missing reference errors 1649 + for missing_ref in missing_refs { 1650 + lexicon_errors.push(missing_ref); 1651 + } 1652 + } 1653 + 1654 + // Only add to map if there are errors 1655 + if !lexicon_errors.is_empty() { 1656 + errors_by_lexicon.insert(lexicon_id.clone(), lexicon_errors); 1657 + } 1658 + } 1659 + 1660 + errors_by_lexicon 1661 + } 1662 + 1017 1663 /// Recursively collect missing cross-lexicon references 1018 1664 fn collect_missing_references( 1019 1665 &self, ··· 1032 1678 let parts: Vec<&str> = ref_str.split('#').collect(); 1033 1679 if parts.len() == 2 { 1034 1680 let target_lexicon = parts[0]; 1681 + let target_def = parts[1]; 1682 + 1035 1683 if !self.lexicons.contains_key(target_lexicon) { 1036 1684 let missing_ref = format!( 1037 1685 "Missing lexicon '{}' (referenced as '{}')", ··· 1040 1688 if !missing_refs.contains(&missing_ref) { 1041 1689 missing_refs.push(missing_ref); 1042 1690 } 1691 + } else if let Some(lexicon) = 
self.lexicons.get(target_lexicon) { 1692 + // Check if the definition exists 1693 + if !lexicon.defs.get(target_def).is_some() { 1694 + let missing_ref = format!( 1695 + "Missing definition '{}' in lexicon '{}' (referenced as '{}')", 1696 + target_def, target_lexicon, ref_str 1697 + ); 1698 + if !missing_refs.contains(&missing_ref) { 1699 + missing_refs.push(missing_ref); 1700 + } 1701 + } 1043 1702 } 1044 1703 } 1045 1704 } else if ref_str.contains('.') { ··· 1053 1712 if !missing_refs.contains(&missing_ref) { 1054 1713 missing_refs.push(missing_ref); 1055 1714 } 1715 + } else if let Some(lexicon) = self.lexicons.get(ref_str) { 1716 + // Check if 'main' definition exists 1717 + if !lexicon.defs.get("main").is_some() { 1718 + let missing_ref = format!( 1719 + "Missing 'main' definition in lexicon '{}' (referenced as '{}')", 1720 + ref_str, ref_str 1721 + ); 1722 + if !missing_refs.contains(&missing_ref) { 1723 + missing_refs.push(missing_ref); 1724 + } 1725 + } 1056 1726 } 1057 1727 } 1058 1728 } ··· 1068 1738 let parts: Vec<&str> = ref_str.split('#').collect(); 1069 1739 if parts.len() == 2 { 1070 1740 let target_lexicon = parts[0]; 1741 + let target_def = parts[1]; 1742 + 1071 1743 if !self.lexicons.contains_key(target_lexicon) { 1072 1744 let missing_ref = format!( 1073 1745 "Missing lexicon '{}' (referenced as '{}')", ··· 1076 1748 if !missing_refs.contains(&missing_ref) { 1077 1749 missing_refs.push(missing_ref); 1078 1750 } 1751 + } else if let Some(lexicon) = self.lexicons.get(target_lexicon) { 1752 + // Check if the definition exists 1753 + if !lexicon.defs.get(target_def).is_some() { 1754 + let missing_ref = format!( 1755 + "Missing definition '{}' in lexicon '{}' (referenced as '{}')", 1756 + target_def, target_lexicon, ref_str 1757 + ); 1758 + if !missing_refs.contains(&missing_ref) { 1759 + missing_refs.push(missing_ref); 1760 + } 1761 + } 1079 1762 } 1080 1763 } 1081 1764 } else if ref_str.contains('.') { ··· 1087 1770 ); 1088 1771 if 
!missing_refs.contains(&missing_ref) { 1089 1772 missing_refs.push(missing_ref); 1773 + } 1774 + } else if let Some(lexicon) = self.lexicons.get(ref_str) { 1775 + // Check if 'main' definition exists 1776 + if !lexicon.defs.get("main").is_some() { 1777 + let missing_ref = format!( 1778 + "Missing 'main' definition in lexicon '{}' (referenced as '{}')", 1779 + ref_str, ref_str 1780 + ); 1781 + if !missing_refs.contains(&missing_ref) { 1782 + missing_refs.push(missing_ref); 1783 + } 1090 1784 } 1091 1785 } 1092 1786 } ··· 1616 2310 } 1617 2311 1618 2312 // Check other required fields 1619 - let mime_type = obj.get("mimeType").and_then(|v| v.as_str()).ok_or_else(|| { 1620 - ValidationError::RequiredFieldMissing { 2313 + let mime_type = obj 2314 + .get("mimeType") 2315 + .and_then(|v| v.as_str()) 2316 + .ok_or_else(|| ValidationError::RequiredFieldMissing { 1621 2317 path: ctx.with_field("mimeType").path_string(), 1622 - } 1623 - })?; 2318 + })?; 1624 2319 1625 2320 let size = obj.get("size").and_then(|v| v.as_u64()).ok_or_else(|| { 1626 2321 ValidationError::RequiredFieldMissing { ··· 1749 2444 if value != const_value { 1750 2445 return Err(ValidationError::StringValidationFailed { 1751 2446 path: ctx.path_string(), 1752 - message: format!("Value '{}' does not match const value '{}'", value, const_value), 2447 + message: format!( 2448 + "Value '{}' does not match const value '{}'", 2449 + value, const_value 2450 + ), 1753 2451 }); 1754 2452 } 1755 2453 } ··· 1759 2457 if !enum_values.contains(&value.to_string()) { 1760 2458 return Err(ValidationError::StringValidationFailed { 1761 2459 path: ctx.path_string(), 1762 - message: format!("Value '{}' must be one of ({})", value, enum_values.join("|")), 2460 + message: format!( 2461 + "Value '{}' must be one of ({})", 2462 + value, 2463 + enum_values.join("|") 2464 + ), 1763 2465 }); 1764 2466 } 1765 2467 } ··· 1770 2472 if byte_len < min_len { 1771 2473 return Err(ValidationError::StringValidationFailed { 1772 2474 path: 
ctx.path_string(), 1773 - message: format!("String length {} is less than minimum {}", byte_len, min_len), 2475 + message: format!( 2476 + "String length {} is less than minimum {}", 2477 + byte_len, min_len 2478 + ), 1774 2479 }); 1775 2480 } 1776 2481 } ··· 1794 2499 if grapheme_count > max_graphemes { 1795 2500 return Err(ValidationError::StringValidationFailed { 1796 2501 path: ctx.path_string(), 1797 - message: format!("String grapheme count {} exceeds maximum {}", grapheme_count, max_graphemes), 2502 + message: format!( 2503 + "String grapheme count {} exceeds maximum {}", 2504 + grapheme_count, max_graphemes 2505 + ), 1798 2506 }); 1799 2507 } 1800 2508 } ··· 1806 2514 if grapheme_count < min_graphemes { 1807 2515 return Err(ValidationError::StringValidationFailed { 1808 2516 path: ctx.path_string(), 1809 - message: format!("String grapheme count {} is less than minimum {}", grapheme_count, min_graphemes), 2517 + message: format!( 2518 + "String grapheme count {} is less than minimum {}", 2519 + grapheme_count, min_graphemes 2520 + ), 1810 2521 }); 1811 2522 } 1812 2523 } ··· 1961 2672 if !accept_types.contains(&mime_type.to_string()) { 1962 2673 return Err(ValidationError::StringValidationFailed { 1963 2674 path: ctx.path_string(), 1964 - message: format!("MIME type '{}' not in accepted types: {}", mime_type, accept_types.join(", ")), 2675 + message: format!( 2676 + "MIME type '{}' not in accepted types: {}", 2677 + mime_type, 2678 + accept_types.join(", ") 2679 + ), 1965 2680 }); 1966 2681 } 1967 2682 } ··· 2009 2724 if !enum_values.contains(&default_val.to_string()) { 2010 2725 errors.push(format!( 2011 2726 "String property '{}' default value '{}' must be one of ({})", 2012 - field_path, default_val, enum_values.join("|") 2727 + field_path, 2728 + default_val, 2729 + enum_values.join("|") 2013 2730 )); 2014 2731 } 2015 2732 } ··· 2077 2794 2078 2795 // Check enum constraint 2079 2796 if let Some(enum_array) = prop_def.get("enum").and_then(|v| 
v.as_array()) { 2080 - let enum_values: Vec<i64> = enum_array 2081 - .iter() 2082 - .filter_map(|v| v.as_i64()) 2083 - .collect(); 2797 + let enum_values: Vec<i64> = enum_array.iter().filter_map(|v| v.as_i64()).collect(); 2084 2798 if !enum_values.contains(&default_val) { 2085 2799 errors.push(format!( 2086 2800 "Integer property '{}' default value {} must be one of ({})", 2087 - field_path, default_val, 2088 - enum_values.iter().map(|v| v.to_string()).collect::<Vec<_>>().join("|") 2801 + field_path, 2802 + default_val, 2803 + enum_values 2804 + .iter() 2805 + .map(|v| v.to_string()) 2806 + .collect::<Vec<_>>() 2807 + .join("|") 2089 2808 )); 2090 2809 } 2091 2810 } ··· 2131 2850 } 2132 2851 } 2133 2852 } 2134 - } 2853 + }
+13
packages/lexicon/mod.ts
··· 8 8 WasmLexiconValidator, 9 9 validate_string_format, 10 10 is_valid_nsid, 11 + validate_lexicons_and_get_errors, 11 12 } from "./wasm/slices_lexicon.js"; 12 13 13 14 // Initialize WASM module ··· 128 129 export async function isValidNsid(nsid: string): Promise<boolean> { 129 130 await ensureWasmInit(); 130 131 return is_valid_nsid(nsid); 132 + } 133 + 134 + /** 135 + * Validate lexicons and get errors without creating a validator instance 136 + * Returns a Map where keys are lexicon IDs and values are arrays of error messages 137 + */ 138 + export async function validateLexiconsAndGetErrors(lexicons: LexiconDoc[]): Promise<Map<string, string[]>> { 139 + await ensureWasmInit(); 140 + const lexiconsJson = JSON.stringify(lexicons); 141 + const result = validate_lexicons_and_get_errors(lexiconsJson); 142 + const errorMap = JSON.parse(result); 143 + return new Map(Object.entries(errorMap)); 131 144 } 132 145 133 146 // Re-export additional types from types module
packages/lexicon/wasm/lexicon_validator_bg.wasm

This is a binary file and will not be displayed.

+12
packages/lexicon/wasm/slices_lexicon.d.ts
··· 1 1 /* tslint:disable */ 2 2 /* eslint-disable */ 3 3 export function main(): void; 4 + /** 5 + * Validate lexicons and return errors without creating a validator instance 6 + * Returns a JSON string containing a map of lexicon ID to error arrays 7 + */ 8 + export function validate_lexicons_and_get_errors(lexicons_json: string): string; 4 9 export function validate_string_format(value: string, format: string): void; 5 10 export function is_valid_nsid(nsid: string): boolean; 6 11 export class WasmLexiconValidator { ··· 19 24 * Validate that all cross-lexicon references can be resolved 20 25 */ 21 26 validate_lexicon_set_completeness(): void; 27 + /** 28 + * Get validation errors grouped by lexicon ID 29 + * Returns a JSON string containing a map of lexicon ID to error arrays 30 + */ 31 + get_validation_errors_by_lexicon(): string; 22 32 } 23 33 24 34 export type InitInput = RequestInfo | URL | Response | BufferSource | WebAssembly.Module; ··· 29 39 readonly wasmlexiconvalidator_new: (a: number, b: number) => [number, number, number]; 30 40 readonly wasmlexiconvalidator_validate_record: (a: number, b: number, c: number, d: number, e: number) => [number, number]; 31 41 readonly wasmlexiconvalidator_validate_lexicon_set_completeness: (a: number) => [number, number]; 42 + readonly wasmlexiconvalidator_get_validation_errors_by_lexicon: (a: number) => [number, number]; 43 + readonly validate_lexicons_and_get_errors: (a: number, b: number) => [number, number]; 32 44 readonly validate_string_format: (a: number, b: number, c: number, d: number) => [number, number]; 33 45 readonly is_valid_nsid: (a: number, b: number) => number; 34 46 readonly main: () => void;
+38
packages/lexicon/wasm/slices_lexicon.js
··· 105 105 return value; 106 106 } 107 107 /** 108 + * Validate lexicons and return errors without creating a validator instance 109 + * Returns a JSON string containing a map of lexicon ID to error arrays 110 + * @param {string} lexicons_json 111 + * @returns {string} 112 + */ 113 + export function validate_lexicons_and_get_errors(lexicons_json) { 114 + let deferred2_0; 115 + let deferred2_1; 116 + try { 117 + const ptr0 = passStringToWasm0(lexicons_json, wasm.__wbindgen_malloc, wasm.__wbindgen_realloc); 118 + const len0 = WASM_VECTOR_LEN; 119 + const ret = wasm.validate_lexicons_and_get_errors(ptr0, len0); 120 + deferred2_0 = ret[0]; 121 + deferred2_1 = ret[1]; 122 + return getStringFromWasm0(ret[0], ret[1]); 123 + } finally { 124 + wasm.__wbindgen_free(deferred2_0, deferred2_1, 1); 125 + } 126 + } 127 + 128 + /** 108 129 * @param {string} value 109 130 * @param {string} format 110 131 */ ··· 186 207 const ret = wasm.wasmlexiconvalidator_validate_lexicon_set_completeness(this.__wbg_ptr); 187 208 if (ret[1]) { 188 209 throw takeFromExternrefTable0(ret[0]); 210 + } 211 + } 212 + /** 213 + * Get validation errors grouped by lexicon ID 214 + * Returns a JSON string containing a map of lexicon ID to error arrays 215 + * @returns {string} 216 + */ 217 + get_validation_errors_by_lexicon() { 218 + let deferred1_0; 219 + let deferred1_1; 220 + try { 221 + const ret = wasm.wasmlexiconvalidator_get_validation_errors_by_lexicon(this.__wbg_ptr); 222 + deferred1_0 = ret[0]; 223 + deferred1_1 = ret[1]; 224 + return getStringFromWasm0(ret[0], ret[1]); 225 + } finally { 226 + wasm.__wbindgen_free(deferred1_0, deferred1_1, 1); 189 227 } 190 228 } 191 229 }
packages/lexicon/wasm/slices_lexicon_bg.wasm

This is a binary file and will not be displayed.

+2
packages/lexicon/wasm/slices_lexicon_bg.wasm.d.ts
··· 5 5 export const wasmlexiconvalidator_new: (a: number, b: number) => [number, number, number]; 6 6 export const wasmlexiconvalidator_validate_record: (a: number, b: number, c: number, d: number, e: number) => [number, number]; 7 7 export const wasmlexiconvalidator_validate_lexicon_set_completeness: (a: number) => [number, number]; 8 + export const wasmlexiconvalidator_get_validation_errors_by_lexicon: (a: number) => [number, number]; 9 + export const validate_lexicons_and_get_errors: (a: number, b: number) => [number, number]; 8 10 export const validate_string_format: (a: number, b: number, c: number, d: number) => [number, number]; 9 11 export const is_valid_nsid: (a: number, b: number) => number; 10 12 export const main: () => void;