Barazo default frontend barazo.forum

chore(workspace): add noUncheckedIndexedAccess, remove duplicate tooling (#65)

* chore(workspace): add strict TS flag and remove duplicate tooling

Add noUncheckedIndexedAccess to tsconfig and fix resulting type errors.
Remove per-repo prettier config (root config used). Clean up commitlint
config by removing redundant scope-empty rule.

* fix: restore prettier config for standalone CI compatibility

CI clones repos independently without the workspace root, so each repo
needs its own prettier config.

Authored by Guido X Jansen and committed by GitHub
6913eeb4 620c0ca8

+8 -18
-6
commitlint.config.mjs
··· 1 - /** 2 - * Commitlint configuration 3 - * Conventional Commits enforced per CLAUDE.md 4 - * @see https://commitlint.js.org/#/reference-configuration 5 - */ 6 1 export default { 7 2 extends: ['@commitlint/config-conventional'], 8 3 rules: { ··· 25 20 'security', 26 21 ], 27 22 ], 28 - 'scope-empty': [0], 29 23 'subject-case': [0], 30 24 }, 31 25 }
-5
prettier.config.mjs
··· 1 - /** 2 - * Prettier configuration 3 - * @see https://prettier.io/docs/en/configuration.html 4 - */ 5 1 const config = { 6 2 semi: false, 7 3 singleQuote: true, 8 4 tabWidth: 2, 9 5 trailingComma: 'es5', 10 6 printWidth: 100, 11 - plugins: [], 12 7 } 13 8 14 9 export default config
+5 -5
src/app/robots.test.ts
··· 22 22 23 23 it('disallows admin, auth, API, and non-public pages', () => { 24 24 const rules = getRules() 25 - expect(rules[0].disallow).toEqual( 25 + expect(rules[0]!.disallow).toEqual( 26 26 expect.arrayContaining([ 27 27 '/admin/', 28 28 '/auth/', ··· 36 36 37 37 it('blocks SEO bots', () => { 38 38 const rules = getRules() 39 - expect(rules[1].userAgent).toEqual( 39 + expect(rules[1]!.userAgent).toEqual( 40 40 expect.arrayContaining(['SemrushBot', 'AhrefsBot', 'MJ12bot']) 41 41 ) 42 - expect(rules[1].disallow).toBe('/') 42 + expect(rules[1]!.disallow).toBe('/') 43 43 }) 44 44 45 45 it('blocks AI crawlers', () => { 46 46 const rules = getRules() 47 - expect(rules[2].userAgent).toEqual( 47 + expect(rules[2]!.userAgent).toEqual( 48 48 expect.arrayContaining(['GPTBot', 'ClaudeBot', 'CCBot', 'Google-Extended']) 49 49 ) 50 - expect(rules[2].disallow).toBe('/') 50 + expect(rules[2]!.disallow).toBe('/') 51 51 }) 52 52 53 53 it('includes sitemap directive', () => {
+1 -1
src/app/sitemap.test.ts
··· 161 161 const result = await sitemap() 162 162 // Should still return at least the homepage 163 163 expect(result.length).toBeGreaterThanOrEqual(1) 164 - expect(result[0].url).toBe('https://barazo.forum') 164 + expect(result[0]!.url).toBe('https://barazo.forum') 165 165 }) 166 166 167 167 it('excludes adult-rated categories from sitemap', async () => {
+1 -1
src/components/moderation-controls.tsx
··· 22 22 className?: string 23 23 } 24 24 25 - const ACTION_CONFIRMATIONS: Record<string, { title: string; description: string }> = { 25 + const ACTION_CONFIRMATIONS: Record<ModerationAction, { title: string; description: string }> = { 26 26 delete: { 27 27 title: 'Delete Topic', 28 28 description:
+1
tsconfig.json
··· 5 5 "allowJs": true, 6 6 "skipLibCheck": true, 7 7 "strict": true, 8 + "noUncheckedIndexedAccess": true, 8 9 "noEmit": true, 9 10 "esModuleInterop": true, 10 11 "module": "esnext",