a collection of lightweight TypeScript packages for AT Protocol, the protocol powering Bluesky

refactor(lex-cli): clean up the CLI a little bit

mary.my.id 7f63eceb dc9c91fd

verified
+329 -236
+9
.changeset/violet-files-switch.md
··· 1 + --- 2 + '@atcute/lex-cli': patch 3 + --- 4 + 5 + add config auto-discovery 6 + 7 + given that config files are pretty much a required part of lex-cli, the CLI tool now attempts to 8 + search for the presence of `lex.config.js` or `lex.config.ts` when a config file is not explicitly 9 + specified.
+2 -2
packages/definitions/atproto/package.json
··· 24 24 }, 25 25 "scripts": { 26 26 "build": "tsc", 27 - "pull": "lex-cli pull -c ./lex.config.js", 28 - "generate": "rm -r ./lib/lexicons/; lex-cli generate -c ./lex.config.js", 27 + "pull": "lex-cli pull", 28 + "generate": "rm -r ./lib/lexicons/; lex-cli generate", 29 29 "prepublish": "rm -rf dist; pnpm run build" 30 30 }, 31 31 "dependencies": {
+1 -1
packages/definitions/bluemoji/package.json
··· 20 20 }, 21 21 "scripts": { 22 22 "build": "tsc", 23 - "generate": "rm -r ./lib/lexicons/; lex-cli generate -c ./lex.config.js", 23 + "generate": "rm -r ./lib/lexicons/; lex-cli generate", 24 24 "prepublish": "rm -rf dist; pnpm run build" 25 25 }, 26 26 "dependencies": {
+2 -2
packages/definitions/bluesky/package.json
··· 26 26 }, 27 27 "scripts": { 28 28 "build": "tsc --project tsconfig.build.json", 29 - "pull": "lex-cli pull -c ./lex.config.js", 29 + "pull": "lex-cli pull", 30 30 "test": "vitest", 31 - "generate": "rm -r ./lib/lexicons/; lex-cli generate -c ./lex.config.js", 31 + "generate": "rm -r ./lib/lexicons/; lex-cli generate", 32 32 "prepublish": "rm -rf dist; pnpm run build" 33 33 }, 34 34 "dependencies": {
+2 -2
packages/definitions/frontpage/package.json
··· 25 25 }, 26 26 "scripts": { 27 27 "build": "tsc", 28 - "pull": "lex-cli pull -c ./lex.config.js", 29 - "generate": "rm -r ./lib/lexicons/; lex-cli generate -c ./lex.config.js", 28 + "pull": "lex-cli pull", 29 + "generate": "rm -r ./lib/lexicons/; lex-cli generate", 30 30 "prepublish": "rm -rf dist; pnpm run build" 31 31 }, 32 32 "dependencies": {
+2 -2
packages/definitions/leaflet/package.json
··· 25 25 }, 26 26 "scripts": { 27 27 "build": "tsc", 28 - "pull": "lex-cli pull -c ./lex.config.js", 29 - "generate": "rm -r ./lib/lexicons/; lex-cli generate -c ./lex.config.js", 28 + "pull": "lex-cli pull", 29 + "generate": "rm -r ./lib/lexicons/; lex-cli generate", 30 30 "prepublish": "rm -rf dist; pnpm run build" 31 31 }, 32 32 "dependencies": {
+2 -2
packages/definitions/lexicon-community/package.json
··· 20 20 }, 21 21 "scripts": { 22 22 "build": "tsc", 23 - "pull": "lex-cli pull -c ./lex.config.js", 24 - "generate": "rm -r ./lib/lexicons/; lex-cli generate -c ./lex.config.js", 23 + "pull": "lex-cli pull", 24 + "generate": "rm -r ./lib/lexicons/; lex-cli generate", 25 25 "prepublish": "rm -rf dist; pnpm run build" 26 26 }, 27 27 "dependencies": {
+1 -1
packages/definitions/microcosm/package.json
··· 21 21 }, 22 22 "scripts": { 23 23 "build": "tsc", 24 - "generate": "rm -r ./lib/lexicons/; lex-cli generate -c ./lex.config.js", 24 + "generate": "rm -r ./lib/lexicons/; lex-cli generate", 25 25 "prepublish": "rm -rf dist; pnpm run build" 26 26 }, 27 27 "dependencies": {
+2 -2
packages/definitions/ozone/package.json
··· 26 26 }, 27 27 "scripts": { 28 28 "build": "tsc", 29 - "pull": "lex-cli pull -c ./lex.config.js", 30 - "generate": "rm -r ./lib/lexicons/; lex-cli generate -c ./lex.config.js", 29 + "pull": "lex-cli pull", 30 + "generate": "rm -r ./lib/lexicons/; lex-cli generate", 31 31 "prepublish": "rm -rf dist; pnpm run build" 32 32 }, 33 33 "dependencies": {
+2 -2
packages/definitions/tangled/package.json
··· 25 25 }, 26 26 "scripts": { 27 27 "build": "tsc", 28 - "pull": "lex-cli pull -c ./lex.config.js", 29 - "generate": "rm -r ./lib/lexicons/; lex-cli generate -c ./lex.config.js", 28 + "pull": "lex-cli pull", 29 + "generate": "rm -r ./lib/lexicons/; lex-cli generate", 30 30 "prepublish": "rm -rf dist; pnpm run build" 31 31 }, 32 32 "dependencies": {
+2 -2
packages/definitions/whitewind/package.json
··· 25 25 }, 26 26 "scripts": { 27 27 "build": "tsc", 28 - "pull": "lex-cli pull -c ./lex.config.js", 29 - "generate": "rm -r ./lib/lexicons/; lex-cli generate -c ./lex.config.js", 28 + "pull": "lex-cli pull", 29 + "generate": "rm -r ./lib/lexicons/; lex-cli generate", 30 30 "prepublish": "rm -rf dist; pnpm run build" 31 31 }, 32 32 "dependencies": {
+6 -6
packages/lexicons/lex-cli/README.md
··· 20 20 then run the tool: 21 21 22 22 ``` 23 - npm exec lex-cli generate -c ./lex.config.js 23 + npm exec lex-cli generate 24 24 ``` 25 25 26 26 ## pulling lexicons ··· 53 53 pull the lexicons to disk, then generate types from them: 54 54 55 55 ``` 56 - npm exec lex-cli pull -c ./lex.config.js 57 - npm exec lex-cli generate -c ./lex.config.js 56 + npm exec lex-cli pull 57 + npm exec lex-cli generate 58 58 ``` 59 59 60 60 ## publishing your schemas 61 61 62 - if you're packaging your generated schemas as a publishable library, add the `atcute:lexicons` 63 - field to your package.json. this allows other projects to automatically discover and import your 64 - schemas without manual configuration. 62 + if you're packaging your generated schemas as a publishable library, add the `atcute:lexicons` field 63 + to your package.json. this allows other projects to automatically discover and import your schemas 64 + without manual configuration. 65 65 66 66 ```json 67 67 {
+6 -210
packages/lexicons/lex-cli/src/cli.ts
··· 1 - import * as fs from 'node:fs/promises'; 2 - import * as path from 'node:path'; 3 - 4 - import { lexiconDoc, refineLexiconDoc, type LexiconDoc } from '@atcute/lexicon-doc'; 5 - 6 - import { object } from '@optique/core/constructs'; 7 - import { command, constant, option } from '@optique/core/primitives'; 8 1 import { or } from '@optique/core/constructs'; 9 2 import { run } from '@optique/run'; 10 - import { path as pathParser } from '@optique/run/valueparser'; 11 - import pc from 'picocolors'; 12 - 13 - import { generateLexiconApi, type ImportMapping } from './codegen.js'; 14 - import { loadConfig } from './config.js'; 15 - import { packageJsonSchema } from './lexicon-metadata.js'; 16 - import { runPull } from './pull.js'; 17 3 18 - /** 19 - * Resolves package imports to ImportMapping[] 20 - */ 21 - const resolveImportsToMappings = async ( 22 - imports: string[], 23 - configDirname: string, 24 - ): Promise<ImportMapping[]> => { 25 - const mappings: ImportMapping[] = []; 26 - 27 - for (const packageName of imports) { 28 - // Walk up from config directory to find package in node_modules 29 - let packageJson: unknown; 30 - let currentDir = configDirname; 31 - let found = false; 32 - 33 - while (currentDir !== path.dirname(currentDir)) { 34 - const candidatePath = path.join(currentDir, 'node_modules', packageName, 'package.json'); 35 - try { 36 - const content = await fs.readFile(candidatePath, 'utf8'); 37 - packageJson = JSON.parse(content); 38 - found = true; 39 - break; 40 - } catch (err: any) { 41 - // Only continue to parent if file not found 42 - if (err.code !== 'ENOENT') { 43 - console.error(pc.bold(pc.red(`failed to read package.json for "${packageName}":`))); 44 - console.error(err); 45 - process.exit(1); 46 - } 47 - 48 - // Not found, try parent directory 49 - currentDir = path.dirname(currentDir); 50 - } 51 - } 52 - 53 - if (!found) { 54 - console.error(pc.bold(pc.red(`failed to resolve package "${packageName}"`))); 55 - console.error(`Could not find 
package in node_modules starting from ${configDirname}`); 56 - process.exit(1); 57 - } 58 - 59 - // Validate package.json 60 - const result = packageJsonSchema.try(packageJson, { mode: 'passthrough' }); 61 - if (!result.ok) { 62 - console.error(pc.bold(pc.red(`invalid atcute:lexicons in "${packageName}":`))); 63 - console.error(result.message); 64 - 65 - for (const issue of result.issues) { 66 - console.log(`- ${issue.code} at .${issue.path.join('.')}`); 67 - } 68 - 69 - process.exit(1); 70 - } 71 - 72 - const lexicons = result.value['atcute:lexicons']; 73 - if (!lexicons?.mappings) { 74 - continue; 75 - } 76 - 77 - // Convert mapping to ImportMapping[] 78 - for (const [pattern, entry] of Object.entries(lexicons.mappings)) { 79 - const isWildcard = pattern.endsWith('.*'); 80 - 81 - mappings.push({ 82 - nsid: [pattern], 83 - imports: (nsid: string) => { 84 - // Check if pattern matches 85 - if (isWildcard) { 86 - if (!nsid.startsWith(pattern.slice(0, -1))) { 87 - throw new Error(`NSID ${nsid} does not match pattern ${pattern}`); 88 - } 89 - } else { 90 - if (nsid !== pattern) { 91 - throw new Error(`NSID ${nsid} does not match pattern ${pattern}`); 92 - } 93 - } 94 - 95 - const nsidPrefix = isWildcard ? pattern.slice(0, -2) : pattern; 96 - const nsidRemainder = isWildcard ? 
nsid.slice(nsidPrefix.length + 1) : ''; 97 - 98 - let expandedPath = entry.path 99 - .replaceAll('{{nsid}}', nsid.replaceAll('.', '/')) 100 - .replaceAll('{{nsid_remainder}}', nsidRemainder.replaceAll('.', '/')) 101 - .replaceAll('{{nsid_prefix}}', nsidPrefix.replaceAll('.', '/')); 102 - 103 - if (expandedPath === '.') { 104 - expandedPath = packageName; 105 - } else if (expandedPath.startsWith('./')) { 106 - expandedPath = `${packageName}/${expandedPath.slice(2)}`; 107 - } 108 - 109 - return { 110 - type: entry.type, 111 - from: expandedPath, 112 - }; 113 - }, 114 - }); 115 - } 116 - } 117 - 118 - return mappings; 119 - }; 4 + import { generateCommandSchema, runGenerate } from './commands/generate.js'; 5 + import { pullCommandSchema, runPull } from './commands/pull.js'; 120 6 121 - const parser = or( 122 - command( 123 - 'generate', 124 - object({ 125 - type: constant('generate'), 126 - config: option('-c', '--config', pathParser({ metavar: 'CONFIG' })), 127 - }), 128 - ), 129 - command( 130 - 'pull', 131 - object({ 132 - type: constant('pull'), 133 - config: option('-c', '--config', pathParser({ metavar: 'CONFIG' })), 134 - }), 135 - ), 136 - ); 7 + const parser = or(generateCommandSchema, pullCommandSchema); 137 8 138 - const result = run(parser, { programName: 'lex-cli' }); 9 + const result = run(parser, { programName: 'lex-cli', help: 'both' }); 139 10 140 11 if (result.type === 'generate') { 141 - const config = await loadConfig(result.config); 142 - 143 - // Resolve imports to mappings 144 - const importMappings = config.imports ? await resolveImportsToMappings(config.imports, config.root) : []; 145 - const allMappings = [...importMappings, ...(config.mappings ?? 
[])]; 146 - 147 - const documents: LexiconDoc[] = []; 148 - 149 - for await (const filename of fs.glob(config.files, { cwd: config.root })) { 150 - let source: string; 151 - try { 152 - source = await fs.readFile(path.join(config.root, filename), 'utf8'); 153 - } catch (err) { 154 - console.error(pc.bold(pc.red(`file read error with "${filename}"`))); 155 - console.error(err); 156 - 157 - process.exit(1); 158 - } 159 - 160 - let json: unknown; 161 - try { 162 - json = JSON.parse(source); 163 - } catch (err) { 164 - console.error(pc.bold(pc.red(`json parse error in "${filename}"`))); 165 - console.error(err); 166 - 167 - process.exit(1); 168 - } 169 - 170 - const result = lexiconDoc.try(json, { mode: 'strip' }); 171 - if (!result.ok) { 172 - console.error(pc.bold(pc.red(`schema validation failed for "${filename}"`))); 173 - console.error(result.message); 174 - 175 - for (const issue of result.issues) { 176 - console.log(`- ${issue.code} at .${issue.path.join('.')}`); 177 - } 178 - 179 - process.exit(1); 180 - } 181 - 182 - const issues = refineLexiconDoc(result.value, true); 183 - if (issues.length > 0) { 184 - console.error(pc.bold(pc.red(`lint validation failed for "${filename}"`))); 185 - 186 - for (const issue of issues) { 187 - console.log(`- ${issue.message} at .${issue.path.join('.')}`); 188 - } 189 - 190 - process.exit(1); 191 - } 192 - 193 - documents.push(result.value); 194 - } 195 - 196 - const generationResult = await generateLexiconApi({ 197 - documents: documents, 198 - mappings: allMappings, 199 - modules: { 200 - importSuffix: config.modules?.importSuffix ?? 
'.js', 201 - }, 202 - prettier: { 203 - cwd: process.cwd(), 204 - }, 205 - }); 206 - 207 - const outdir = path.join(config.root, config.outdir); 208 - 209 - for (const file of generationResult.files) { 210 - const filename = path.join(outdir, file.filename); 211 - const dirname = path.dirname(filename); 212 - 213 - await fs.mkdir(dirname, { recursive: true }); 214 - await fs.writeFile(filename, file.code); 215 - } 12 + await runGenerate(result); 216 13 } else if (result.type === 'pull') { 217 - const config = await loadConfig(result.config); 218 - await runPull(config); 14 + await runPull(result); 219 15 }
+216
packages/lexicons/lex-cli/src/commands/generate.ts
··· 1 + import * as fs from 'node:fs/promises'; 2 + import * as path from 'node:path'; 3 + 4 + import { lexiconDoc, refineLexiconDoc, type LexiconDoc } from '@atcute/lexicon-doc'; 5 + 6 + import { merge, object } from '@optique/core/constructs'; 7 + import { message } from '@optique/core/message'; 8 + import { type InferValue } from '@optique/core/parser'; 9 + import { command, constant } from '@optique/core/primitives'; 10 + import pc from 'picocolors'; 11 + 12 + import { generateLexiconApi, type ImportMapping } from '../codegen.js'; 13 + import { loadConfig } from '../config.js'; 14 + import { packageJsonSchema } from '../lexicon-metadata.js'; 15 + import { sharedOptions } from '../shared-options.js'; 16 + 17 + /** 18 + * resolves package imports to ImportMapping[] 19 + */ 20 + const resolveImportsToMappings = async ( 21 + imports: string[], 22 + configDirname: string, 23 + ): Promise<ImportMapping[]> => { 24 + const mappings: ImportMapping[] = []; 25 + 26 + for (const packageName of imports) { 27 + // walk up from config directory to find package in node_modules 28 + let packageJson: unknown; 29 + let currentDir = configDirname; 30 + let found = false; 31 + 32 + while (currentDir !== path.dirname(currentDir)) { 33 + const candidatePath = path.join(currentDir, 'node_modules', packageName, 'package.json'); 34 + try { 35 + const content = await fs.readFile(candidatePath, 'utf8'); 36 + packageJson = JSON.parse(content); 37 + found = true; 38 + break; 39 + } catch (err: any) { 40 + // only continue to parent if file not found 41 + if (err.code !== 'ENOENT') { 42 + console.error(pc.bold(pc.red(`failed to read package.json for "${packageName}":`))); 43 + console.error(err); 44 + process.exit(1); 45 + } 46 + 47 + // not found, try parent directory 48 + currentDir = path.dirname(currentDir); 49 + } 50 + } 51 + 52 + if (!found) { 53 + console.error(pc.bold(pc.red(`failed to resolve package "${packageName}"`))); 54 + console.error(`Could not find package in node_modules 
starting from ${configDirname}`); 55 + process.exit(1); 56 + } 57 + 58 + // validate package.json 59 + const result = packageJsonSchema.try(packageJson, { mode: 'passthrough' }); 60 + if (!result.ok) { 61 + console.error(pc.bold(pc.red(`invalid atcute:lexicons in "${packageName}":`))); 62 + console.error(result.message); 63 + 64 + for (const issue of result.issues) { 65 + console.log(`- ${issue.code} at .${issue.path.join('.')}`); 66 + } 67 + 68 + process.exit(1); 69 + } 70 + 71 + const lexicons = result.value['atcute:lexicons']; 72 + if (!lexicons?.mappings) { 73 + continue; 74 + } 75 + 76 + // convert mapping to ImportMapping[] 77 + for (const [pattern, entry] of Object.entries(lexicons.mappings)) { 78 + const isWildcard = pattern.endsWith('.*'); 79 + 80 + mappings.push({ 81 + nsid: [pattern], 82 + imports: (nsid: string) => { 83 + // check if pattern matches 84 + if (isWildcard) { 85 + if (!nsid.startsWith(pattern.slice(0, -1))) { 86 + throw new Error(`NSID ${nsid} does not match pattern ${pattern}`); 87 + } 88 + } else { 89 + if (nsid !== pattern) { 90 + throw new Error(`NSID ${nsid} does not match pattern ${pattern}`); 91 + } 92 + } 93 + 94 + const nsidPrefix = isWildcard ? pattern.slice(0, -2) : pattern; 95 + const nsidRemainder = isWildcard ? 
nsid.slice(nsidPrefix.length + 1) : ''; 96 + 97 + let expandedPath = entry.path 98 + .replaceAll('{{nsid}}', nsid.replaceAll('.', '/')) 99 + .replaceAll('{{nsid_remainder}}', nsidRemainder.replaceAll('.', '/')) 100 + .replaceAll('{{nsid_prefix}}', nsidPrefix.replaceAll('.', '/')); 101 + 102 + if (expandedPath === '.') { 103 + expandedPath = packageName; 104 + } else if (expandedPath.startsWith('./')) { 105 + expandedPath = `${packageName}/${expandedPath.slice(2)}`; 106 + } 107 + 108 + return { 109 + type: entry.type, 110 + from: expandedPath, 111 + }; 112 + }, 113 + }); 114 + } 115 + } 116 + 117 + return mappings; 118 + }; 119 + 120 + export const generateCommandSchema = command( 121 + 'generate', 122 + merge( 123 + object({ 124 + type: constant('generate'), 125 + }), 126 + sharedOptions, 127 + ), 128 + { 129 + brief: message`generate type definitions from lexicon documents`, 130 + description: message`reads lexicon documents from the configured files and generates TypeScript type definitions and runtime validators.`, 131 + }, 132 + ); 133 + 134 + export type GenerateCommand = InferValue<typeof generateCommandSchema>; 135 + 136 + /** 137 + * runs the generate command to create type definitions from lexicon documents 138 + * @param args parsed command arguments 139 + */ 140 + export const runGenerate = async (args: GenerateCommand): Promise<void> => { 141 + const config = await loadConfig(args.config); 142 + 143 + // resolve imports to mappings 144 + const importMappings = config.imports ? await resolveImportsToMappings(config.imports, config.root) : []; 145 + const allMappings = [...importMappings, ...(config.mappings ?? 
[])]; 146 + 147 + const documents: LexiconDoc[] = []; 148 + 149 + for await (const filename of fs.glob(config.files, { cwd: config.root })) { 150 + let source: string; 151 + try { 152 + source = await fs.readFile(path.join(config.root, filename), 'utf8'); 153 + } catch (err) { 154 + console.error(pc.bold(pc.red(`file read error with "${filename}"`))); 155 + console.error(err); 156 + 157 + process.exit(1); 158 + } 159 + 160 + let json: unknown; 161 + try { 162 + json = JSON.parse(source); 163 + } catch (err) { 164 + console.error(pc.bold(pc.red(`json parse error in "${filename}"`))); 165 + console.error(err); 166 + 167 + process.exit(1); 168 + } 169 + 170 + const result = lexiconDoc.try(json, { mode: 'strip' }); 171 + if (!result.ok) { 172 + console.error(pc.bold(pc.red(`schema validation failed for "${filename}"`))); 173 + console.error(result.message); 174 + 175 + for (const issue of result.issues) { 176 + console.log(`- ${issue.code} at .${issue.path.join('.')}`); 177 + } 178 + 179 + process.exit(1); 180 + } 181 + 182 + const issues = refineLexiconDoc(result.value, true); 183 + if (issues.length > 0) { 184 + console.error(pc.bold(pc.red(`lint validation failed for "${filename}"`))); 185 + 186 + for (const issue of issues) { 187 + console.log(`- ${issue.message} at .${issue.path.join('.')}`); 188 + } 189 + 190 + process.exit(1); 191 + } 192 + 193 + documents.push(result.value); 194 + } 195 + 196 + const generationResult = await generateLexiconApi({ 197 + documents: documents, 198 + mappings: allMappings, 199 + modules: { 200 + importSuffix: config.modules?.importSuffix ?? 
'.js', 201 + }, 202 + prettier: { 203 + cwd: process.cwd(), 204 + }, 205 + }); 206 + 207 + const outdir = path.join(config.root, config.outdir); 208 + 209 + for (const file of generationResult.files) { 210 + const filename = path.join(outdir, file.filename); 211 + const dirname = path.dirname(filename); 212 + 213 + await fs.mkdir(dirname, { recursive: true }); 214 + await fs.writeFile(filename, file.code); 215 + } 216 + };
+33
packages/lexicons/lex-cli/src/commands/pull.ts
··· 1 + import { merge, object } from '@optique/core/constructs'; 2 + import { message } from '@optique/core/message'; 3 + import { type InferValue } from '@optique/core/parser'; 4 + import { command, constant } from '@optique/core/primitives'; 5 + 6 + import { loadConfig } from '../config.js'; 7 + import { runPull as runPullImpl } from '../pull.js'; 8 + import { sharedOptions } from '../shared-options.js'; 9 + 10 + export const pullCommandSchema = command( 11 + 'pull', 12 + merge( 13 + object({ 14 + type: constant('pull'), 15 + }), 16 + sharedOptions, 17 + ), 18 + { 19 + brief: message`pull lexicon documents from configured sources`, 20 + description: message`fetches lexicon documents from configured git repositories and writes them to the output directory.`, 21 + }, 22 + ); 23 + 24 + export type PullCommand = InferValue<typeof pullCommandSchema>; 25 + 26 + /** 27 + * runs the pull command to fetch lexicon documents from configured sources 28 + * @param args parsed command arguments 29 + */ 30 + export const runPull = async (args: PullCommand): Promise<void> => { 31 + const config = await loadConfig(args.config); 32 + await runPullImpl(config); 33 + };
+28 -2
packages/lexicons/lex-cli/src/config.ts
··· 1 + import * as fs from 'node:fs/promises'; 1 2 import * as path from 'node:path'; 2 3 import * as url from 'node:url'; 3 4 ··· 98 99 root: string; 99 100 } 100 101 101 - export const loadConfig = async (configPath: string): Promise<NormalizedConfig> => { 102 - const configFilename = path.resolve(configPath); 102 + export const loadConfig = async (configPath?: string): Promise<NormalizedConfig> => { 103 + let configFilename: string | undefined; 104 + 105 + if (configPath) { 106 + configFilename = path.resolve(configPath); 107 + } else { 108 + // try to find lex.config.js or lex.config.ts in the current directory 109 + const candidates = ['lex.config.js', 'lex.config.ts']; 110 + 111 + for (const candidate of candidates) { 112 + const candidatePath = path.resolve(candidate); 113 + try { 114 + await fs.access(candidatePath); 115 + configFilename = candidatePath; 116 + break; 117 + } catch { 118 + // file doesn't exist, try next candidate 119 + } 120 + } 121 + 122 + if (!configFilename) { 123 + console.error(pc.bold(pc.red(`config file not found`))); 124 + console.error(`looked for: ${candidates.join(', ')}`); 125 + process.exit(1); 126 + } 127 + } 128 + 103 129 const configDirname = path.dirname(configFilename); 104 130 105 131 let rawConfig: unknown;
+13
packages/lexicons/lex-cli/src/shared-options.ts
··· 1 + import { object } from '@optique/core/constructs'; 2 + import { message } from '@optique/core/message'; 3 + import { optional } from '@optique/core/modifiers'; 4 + import { option } from '@optique/core/primitives'; 5 + import { path as pathParser } from '@optique/run/valueparser'; 6 + 7 + export const sharedOptions = object(`Global options`, { 8 + config: optional( 9 + option('-c', '--config', pathParser({ metavar: 'CONFIG' }), { 10 + description: message`path to the lexicon configuration file. defaults to searching for lex.config.js or lex.config.ts in the current directory.`, 11 + }), 12 + ), 13 + });