🏰 Self-documenting Identifier Name Analyser for Go

feat: Initial commit

fuwn.net 856e2994

+2325
+1
.gitignore
··· 1 + bin/
+202
LICENSE-APACHE
··· 1 + 2 + Apache License 3 + Version 2.0, January 2004 4 + http://www.apache.org/licenses/ 5 + 6 + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION 7 + 8 + 1. Definitions. 9 + 10 + "License" shall mean the terms and conditions for use, reproduction, 11 + and distribution as defined by Sections 1 through 9 of this document. 12 + 13 + "Licensor" shall mean the copyright owner or entity authorized by 14 + the copyright owner that is granting the License. 15 + 16 + "Legal Entity" shall mean the union of the acting entity and all 17 + other entities that control, are controlled by, or are under common 18 + control with that entity. For the purposes of this definition, 19 + "control" means (i) the power, direct or indirect, to cause the 20 + direction or management of such entity, whether by contract or 21 + otherwise, or (ii) ownership of fifty percent (50%) or more of the 22 + outstanding shares, or (iii) beneficial ownership of such entity. 23 + 24 + "You" (or "Your") shall mean an individual or Legal Entity 25 + exercising permissions granted by this License. 26 + 27 + "Source" form shall mean the preferred form for making modifications, 28 + including but not limited to software source code, documentation 29 + source, and configuration files. 30 + 31 + "Object" form shall mean any form resulting from mechanical 32 + transformation or translation of a Source form, including but 33 + not limited to compiled object code, generated documentation, 34 + and conversions to other media types. 35 + 36 + "Work" shall mean the work of authorship, whether in Source or 37 + Object form, made available under the License, as indicated by a 38 + copyright notice that is included in or attached to the work 39 + (an example is provided in the Appendix below). 
40 + 41 + "Derivative Works" shall mean any work, whether in Source or Object 42 + form, that is based on (or derived from) the Work and for which the 43 + editorial revisions, annotations, elaborations, or other modifications 44 + represent, as a whole, an original work of authorship. For the purposes 45 + of this License, Derivative Works shall not include works that remain 46 + separable from, or merely link (or bind by name) to the interfaces of, 47 + the Work and Derivative Works thereof. 48 + 49 + "Contribution" shall mean any work of authorship, including 50 + the original version of the Work and any modifications or additions 51 + to that Work or Derivative Works thereof, that is intentionally 52 + submitted to Licensor for inclusion in the Work by the copyright owner 53 + or by an individual or Legal Entity authorized to submit on behalf of 54 + the copyright owner. For the purposes of this definition, "submitted" 55 + means any form of electronic, verbal, or written communication sent 56 + to the Licensor or its representatives, including but not limited to 57 + communication on electronic mailing lists, source code control systems, 58 + and issue tracking systems that are managed by, or on behalf of, the 59 + Licensor for the purpose of discussing and improving the Work, but 60 + excluding communication that is conspicuously marked or otherwise 61 + designated in writing by the copyright owner as "Not a Contribution." 62 + 63 + "Contributor" shall mean Licensor and any individual or Legal Entity 64 + on behalf of whom a Contribution has been received by Licensor and 65 + subsequently incorporated within the Work. 66 + 67 + 2. Grant of Copyright License. 
Subject to the terms and conditions of 68 + this License, each Contributor hereby grants to You a perpetual, 69 + worldwide, non-exclusive, no-charge, royalty-free, irrevocable 70 + copyright license to reproduce, prepare Derivative Works of, 71 + publicly display, publicly perform, sublicense, and distribute the 72 + Work and such Derivative Works in Source or Object form. 73 + 74 + 3. Grant of Patent License. Subject to the terms and conditions of 75 + this License, each Contributor hereby grants to You a perpetual, 76 + worldwide, non-exclusive, no-charge, royalty-free, irrevocable 77 + (except as stated in this section) patent license to make, have made, 78 + use, offer to sell, sell, import, and otherwise transfer the Work, 79 + where such license applies only to those patent claims licensable 80 + by such Contributor that are necessarily infringed by their 81 + Contribution(s) alone or by combination of their Contribution(s) 82 + with the Work to which such Contribution(s) was submitted. If You 83 + institute patent litigation against any entity (including a 84 + cross-claim or counterclaim in a lawsuit) alleging that the Work 85 + or a Contribution incorporated within the Work constitutes direct 86 + or contributory patent infringement, then any patent licenses 87 + granted to You under this License for that Work shall terminate 88 + as of the date such litigation is filed. 89 + 90 + 4. Redistribution. 
You may reproduce and distribute copies of the 91 + Work or Derivative Works thereof in any medium, with or without 92 + modifications, and in Source or Object form, provided that You 93 + meet the following conditions: 94 + 95 + (a) You must give any other recipients of the Work or 96 + Derivative Works a copy of this License; and 97 + 98 + (b) You must cause any modified files to carry prominent notices 99 + stating that You changed the files; and 100 + 101 + (c) You must retain, in the Source form of any Derivative Works 102 + that You distribute, all copyright, patent, trademark, and 103 + attribution notices from the Source form of the Work, 104 + excluding those notices that do not pertain to any part of 105 + the Derivative Works; and 106 + 107 + (d) If the Work includes a "NOTICE" text file as part of its 108 + distribution, then any Derivative Works that You distribute must 109 + include a readable copy of the attribution notices contained 110 + within such NOTICE file, excluding those notices that do not 111 + pertain to any part of the Derivative Works, in at least one 112 + of the following places: within a NOTICE text file distributed 113 + as part of the Derivative Works; within the Source form or 114 + documentation, if provided along with the Derivative Works; or, 115 + within a display generated by the Derivative Works, if and 116 + wherever such third-party notices normally appear. The contents 117 + of the NOTICE file are for informational purposes only and 118 + do not modify the License. You may add Your own attribution 119 + notices within Derivative Works that You distribute, alongside 120 + or as an addendum to the NOTICE text from the Work, provided 121 + that such additional attribution notices cannot be construed 122 + as modifying the License. 
123 + 124 + You may add Your own copyright statement to Your modifications and 125 + may provide additional or different license terms and conditions 126 + for use, reproduction, or distribution of Your modifications, or 127 + for any such Derivative Works as a whole, provided Your use, 128 + reproduction, and distribution of the Work otherwise complies with 129 + the conditions stated in this License. 130 + 131 + 5. Submission of Contributions. Unless You explicitly state otherwise, 132 + any Contribution intentionally submitted for inclusion in the Work 133 + by You to the Licensor shall be under the terms and conditions of 134 + this License, without any additional terms or conditions. 135 + Notwithstanding the above, nothing herein shall supersede or modify 136 + the terms of any separate license agreement you may have executed 137 + with Licensor regarding such Contributions. 138 + 139 + 6. Trademarks. This License does not grant permission to use the trade 140 + names, trademarks, service marks, or product names of the Licensor, 141 + except as required for reasonable and customary use in describing the 142 + origin of the Work and reproducing the content of the NOTICE file. 143 + 144 + 7. Disclaimer of Warranty. Unless required by applicable law or 145 + agreed to in writing, Licensor provides the Work (and each 146 + Contributor provides its Contributions) on an "AS IS" BASIS, 147 + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or 148 + implied, including, without limitation, any warranties or conditions 149 + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A 150 + PARTICULAR PURPOSE. You are solely responsible for determining the 151 + appropriateness of using or redistributing the Work and assume any 152 + risks associated with Your exercise of permissions under this License. 153 + 154 + 8. Limitation of Liability. 
In no event and under no legal theory, 155 + whether in tort (including negligence), contract, or otherwise, 156 + unless required by applicable law (such as deliberate and grossly 157 + negligent acts) or agreed to in writing, shall any Contributor be 158 + liable to You for damages, including any direct, indirect, special, 159 + incidental, or consequential damages of any character arising as a 160 + result of this License or out of the use or inability to use the 161 + Work (including but not limited to damages for loss of goodwill, 162 + work stoppage, computer failure or malfunction, or any and all 163 + other commercial damages or losses), even if such Contributor 164 + has been advised of the possibility of such damages. 165 + 166 + 9. Accepting Warranty or Additional Liability. While redistributing 167 + the Work or Derivative Works thereof, You may choose to offer, 168 + and charge a fee for, acceptance of support, warranty, indemnity, 169 + or other liability obligations and/or rights consistent with this 170 + License. However, in accepting such obligations, You may act only 171 + on Your own behalf and on Your sole responsibility, not on behalf 172 + of any other Contributor, and only if You agree to indemnify, 173 + defend, and hold each Contributor harmless for any liability 174 + incurred by, or claims asserted against, such Contributor by reason 175 + of your accepting any such warranty or additional liability. 176 + 177 + END OF TERMS AND CONDITIONS 178 + 179 + APPENDIX: How to apply the Apache License to your work. 180 + 181 + To apply the Apache License to your work, attach the following 182 + boilerplate notice, with the fields enclosed by brackets "[]" 183 + replaced with your own identifying information. (Don't include 184 + the brackets!) The text should be enclosed in the appropriate 185 + comment syntax for the file format. 
We also recommend that a 186 + file or class name and description of purpose be included on the 187 + same "printed page" as the copyright notice for easier 188 + identification within third-party archives. 189 + 190 + Copyright [yyyy] [name of copyright owner] 191 + 192 + Licensed under the Apache License, Version 2.0 (the "License"); 193 + you may not use this file except in compliance with the License. 194 + You may obtain a copy of the License at 195 + 196 + http://www.apache.org/licenses/LICENSE-2.0 197 + 198 + Unless required by applicable law or agreed to in writing, software 199 + distributed under the License is distributed on an "AS IS" BASIS, 200 + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 201 + See the License for the specific language governing permissions and 202 + limitations under the License.
+5
LICENSE-MIT
··· 1 + Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the “Software”), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: 2 + 3 + The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. 4 + 5 + THE SOFTWARE IS PROVIDED “AS IS”, WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+164
README.md
··· 1 + # &#127984; Kivia 2 + 3 + > Self-documenting Name Analyser for Go 4 + 5 + Name things so your code explains itself. 6 + 7 + Kivia is a fast, local-only analyser that flags identifiers whose terms are absent from dictionary sources or resemble abbreviations of dictionary words. It is built for teams that want explicit, readable naming conventions without external AI calls. 8 + 9 + ## Philosophy 10 + 11 + Identifier names should be fully self-documenting. 12 + 13 + Kivia enforces a strict readability standard: 14 + 15 + - Prefer full words over shorthand 16 + - Avoid ambiguous abbreviations 17 + - Keep the naming intent clear from the identifier itself 18 + 19 + Examples: 20 + 21 + - `userNum` → invalid (`num` is an abbreviation) 22 + - `ctx` → invalid (`ctx` is an abbreviation) 23 + - `userCount` → valid 24 + - `requestContext` → valid 25 + 26 + ## Rules 27 + 28 + 1. **Dictionary words pass**: If a token is present in the loaded dictionary sources, it passes. 29 + 2. **Abbreviations are violations**: If a token expands to a dictionary word (for example, `ctx` -> `context`), it is flagged. 30 + 3. **Unknown terms are violations**: If a token is not in the dictionary and does not map to a known expansion, it is flagged. 31 + 4. **Minimum length is explicit**: `--min-eval-length` determines whether short identifiers are evaluated. 32 + 33 + Kivia also applies dictionary-backed spelling-variant matching for common British/American pairs (for example `normalise`/`normalize`, `colour`/`color`, `centre`/`center`). 34 + 35 + ## How It Works 36 + 37 + Kivia parses Go source using the standard library's AST, extracts identifiers, tokenises names (camel, snake, or kebab), and evaluates each token against a local NLP dictionary pipeline. 
38 + 39 + - No network requests 40 + - No LLM/API dependency 41 + - Deterministic local analysis 42 + 43 + ## Installation 44 + 45 + ```bash 46 + go install github.com/Fuwn/kivia@latest 47 + ``` 48 + 49 + Or build locally: 50 + 51 + ```bash 52 + go build -o ./bin/kivia . 53 + ``` 54 + 55 + ## Usage 56 + 57 + ```bash 58 + # Analyse a package tree 59 + kivia --path ./... 60 + 61 + # Ignore single-letter names during evaluation 62 + kivia --path ./... --min-eval-length 2 63 + 64 + # Ignore specific violations 65 + kivia --path ./... --ignore name=ctx --ignore file=testdata 66 + 67 + # JSON output without context payload 68 + kivia --path ./... --format json --omit-context 69 + ``` 70 + 71 + ### Flags 72 + 73 + | Flag | Description | 74 + |------|-------------| 75 + | `--path` | Path to analyse (`directory`, `file`, or `./...`). | 76 + | `--omit-context` | Hide usage context in output. | 77 + | `--min-eval-length` | Minimum identifier length in runes to evaluate (must be `>= 1`). | 78 + | `--format` | Output format: `text` or `json`. | 79 + | `--fail-on-violation` | Exit with code `1` when violations are found. | 80 + | `--ignore` | Ignore violations by matcher. Repeatable. Prefixes: `name=`, `kind=`, `file=`, `reason=`, `func=`. | 81 + 82 + ## Ignore Matchers 83 + 84 + `--ignore` supports targeted filtering: 85 + 86 + - `name=<substring>` 87 + - `kind=<substring>` 88 + - `file=<substring>` 89 + - `reason=<substring>` 90 + - `func=<substring>` 91 + 92 + Without a prefix, the matcher is applied as a substring across all violation fields. 93 + 94 + Example: 95 + 96 + ```bash 97 + kivia --path ./... \ 98 + --ignore name=ctx \ 99 + --ignore reason=abbreviation \ 100 + --ignore file=_test.go 101 + ``` 102 + 103 + ## Output 104 + 105 + ### Text (default) 106 + 107 + ```text 108 + internal/example/sample.go:12:9 parameter "ctx": Contains abbreviation: ctx. 
109 + context: type=context.Context, function=Handle 110 + ``` 111 + 112 + ### JSON 113 + 114 + ```json 115 + { 116 + "violations": [ 117 + { 118 + "identifier": { 119 + "name": "ctx", 120 + "kind": "parameter", 121 + "file": "internal/example/sample.go", 122 + "line": 12, 123 + "column": 9, 124 + "context": { 125 + "enclosingFunction": "Handle", 126 + "type": "context.Context" 127 + } 128 + }, 129 + "reason": "Contains abbreviation: ctx." 130 + } 131 + ] 132 + } 133 + ``` 134 + 135 + ## Identifier Scope (Go) 136 + 137 + Kivia currently extracts and evaluates: 138 + 139 + - Types 140 + - Functions and methods 141 + - Receivers 142 + - Parameters 143 + - Named results 144 + - Variables (`var`/`const` and `:=`) 145 + - Range keys and values 146 + - Struct fields 147 + - Interface methods 148 + 149 + ## Dictionary and NLP Source 150 + 151 + Kivia loads dictionary data only from configured/system dictionary files. 152 + 153 + 1. `KIVIA_DICTIONARY_PATH` (optional): one path or multiple paths separated by your OS path separator (`:` on macOS/Linux, `;` on Windows). Commas are also accepted. 154 + 2. If `KIVIA_DICTIONARY_PATH` is not set, Kivia uses a default set of dictionary files (for example, `/usr/share/dict/words`, `/usr/share/dict/web2`, and Hunspell dictionaries when present). 155 + 3. If no usable words are found, the analysis fails with an error. 156 + 157 + ## License 158 + 159 + Licensed under either of [Apache License, Version 2.0](LICENSE-APACHE) or 160 + [MIT license](LICENSE-MIT) at your option. 161 + 162 + Unless you explicitly state otherwise, any contribution intentionally submitted 163 + for inclusion in this project by you, as defined in the Apache-2.0 license, shall 164 + be dual licensed as above, without any additional terms or conditions.
+5
go.mod
··· 1 + module github.com/Fuwn/kivia 2 + 3 + go 1.26 4 + 5 + require github.com/sajari/fuzzy v1.0.0
+2
go.sum
··· 1 + github.com/sajari/fuzzy v1.0.0 h1:+FmwVvJErsd0d0hAPlj4CxqxUtQY/fOoY0DwX4ykpRY= 2 + github.com/sajari/fuzzy v1.0.0/go.mod h1:OjYR6KxoWOe9+dOlXeiCJd4dIbED4Oo8wpS89o0pwOo=
+208
internal/analyze/analyze.go
··· 1 + package analyze 2 + 3 + import ( 4 + "github.com/Fuwn/kivia/internal/collect" 5 + "strings" 6 + "unicode" 7 + "unicode/utf8" 8 + ) 9 + 10 + type Options struct { 11 + MinEvaluationLength int 12 + } 13 + 14 + type Result struct { 15 + Violations []Violation `json:"violations"` 16 + } 17 + 18 + type Violation struct { 19 + Identifier collect.Identifier `json:"identifier"` 20 + Reason string `json:"reason"` 21 + } 22 + 23 + func Run(identifiers []collect.Identifier, options Options) (Result, error) { 24 + minimumEvaluationLength := options.MinEvaluationLength 25 + 26 + if minimumEvaluationLength <= 0 { 27 + minimumEvaluationLength = 1 28 + } 29 + 30 + resources, err := getResources() 31 + 32 + if err != nil { 33 + return Result{}, err 34 + } 35 + 36 + violations := make([]Violation, 0) 37 + 38 + for _, identifier := range identifiers { 39 + if utf8.RuneCountInString(strings.TrimSpace(identifier.Name)) < minimumEvaluationLength { 40 + continue 41 + } 42 + 43 + evaluation := evaluateIdentifier(identifier, resources, minimumEvaluationLength) 44 + 45 + if !evaluation.isViolation { 46 + continue 47 + } 48 + 49 + violation := Violation{ 50 + Identifier: identifier, 51 + Reason: evaluation.reason, 52 + } 53 + violations = append(violations, violation) 54 + } 55 + 56 + return Result{Violations: violations}, nil 57 + } 58 + 59 + type evaluationResult struct { 60 + isViolation bool 61 + reason string 62 + } 63 + 64 + func evaluateIdentifier(identifier collect.Identifier, resources resources, minimumTokenLength int) evaluationResult { 65 + name := strings.TrimSpace(identifier.Name) 66 + 67 + if name == "" { 68 + return evaluationResult{} 69 + } 70 + 71 + tokens := tokenize(name) 72 + 73 + if len(tokens) == 0 { 74 + return evaluationResult{} 75 + } 76 + 77 + for _, token := range tokens { 78 + if utf8.RuneCountInString(token) < minimumTokenLength { 79 + continue 80 + } 81 + 82 + if !isAlphabeticToken(token) { 83 + continue 84 + } 85 + 86 + if 
resources.dictionary.IsWord(token) { 87 + continue 88 + } 89 + 90 + if isUpperCaseToken(name, token) { 91 + continue 92 + } 93 + 94 + if isDisallowedAbbreviation(token, resources) { 95 + return evaluationResult{isViolation: true, reason: "Contains abbreviation: " + token + "."} 96 + } 97 + 98 + return evaluationResult{isViolation: true, reason: "Term not found in dictionary: " + token + "."} 99 + } 100 + 101 + return evaluationResult{} 102 + } 103 + 104 + func isUpperCaseToken(identifierName string, token string) bool { 105 + tokenLength := utf8.RuneCountInString(token) 106 + 107 + if tokenLength < 2 || tokenLength > 8 { 108 + return false 109 + } 110 + 111 + return strings.Contains(identifierName, strings.ToUpper(token)) 112 + } 113 + 114 + func tokenize(name string) []string { 115 + name = strings.TrimSpace(name) 116 + 117 + if name == "" { 118 + return nil 119 + } 120 + 121 + parts := strings.FieldsFunc(name, func(r rune) bool { 122 + return r == '_' || r == '-' || r == ' ' 123 + }) 124 + 125 + if len(parts) == 0 { 126 + return nil 127 + } 128 + 129 + result := make([]string, 0, len(parts)*2) 130 + 131 + for _, part := range parts { 132 + if part == "" { 133 + continue 134 + } 135 + 136 + result = append(result, splitCamel(part)...) 
137 + } 138 + 139 + return result 140 + } 141 + 142 + func splitCamel(input string) []string { 143 + if input == "" { 144 + return nil 145 + } 146 + 147 + runes := []rune(input) 148 + 149 + if len(runes) == 0 { 150 + return nil 151 + } 152 + 153 + tokens := make([]string, 0, 2) 154 + start := 0 155 + 156 + for index := 1; index < len(runes); index++ { 157 + current := runes[index] 158 + previous := runes[index-1] 159 + next := rune(0) 160 + 161 + if index+1 < len(runes) { 162 + next = runes[index+1] 163 + } 164 + 165 + isBoundary := false 166 + 167 + if unicode.IsLower(previous) && unicode.IsUpper(current) { 168 + isBoundary = true 169 + } 170 + 171 + if unicode.IsDigit(previous) != unicode.IsDigit(current) { 172 + isBoundary = true 173 + } 174 + 175 + if unicode.IsUpper(previous) && unicode.IsUpper(current) && next != 0 && unicode.IsLower(next) { 176 + isBoundary = true 177 + } 178 + 179 + if isBoundary { 180 + tokens = append(tokens, strings.ToLower(string(runes[start:index]))) 181 + start = index 182 + } 183 + } 184 + 185 + tokens = append(tokens, strings.ToLower(string(runes[start:]))) 186 + 187 + return tokens 188 + } 189 + 190 + func isDisallowedAbbreviation(token string, resources resources) bool { 191 + _, hasExpansion := resources.dictionary.AbbreviationExpansion(token) 192 + 193 + return hasExpansion 194 + } 195 + 196 + func isAlphabeticToken(token string) bool { 197 + if token == "" { 198 + return false 199 + } 200 + 201 + for _, character := range token { 202 + if !unicode.IsLetter(character) { 203 + return false 204 + } 205 + } 206 + 207 + return true 208 + }
+174
internal/analyze/analyze_test.go
// Package analyze_test exercises the analyzer end to end against the word
// list and sample package fixtures under testdata.
package analyze_test

import (
	"github.com/Fuwn/kivia/internal/analyze"
	"github.com/Fuwn/kivia/internal/collect"
	"os"
	"path/filepath"
	"testing"
)

// dictionaryPathForTests returns the shared word-list fixture used by every
// test in this file.
func dictionaryPathForTests(testingContext *testing.T) string {
	testingContext.Helper()

	return filepath.Join("..", "..", "testdata", "dictionary", "words.txt")
}

// TestAnalyzeFlagsAbbreviations collects identifiers from the sample package
// on disk and expects known abbreviations (ctx, userNum, usr) to be flagged.
func TestAnalyzeFlagsAbbreviations(testingContext *testing.T) {
	testingContext.Setenv("KIVIA_DICTIONARY_PATH", dictionaryPathForTests(testingContext))

	root := filepath.Join("..", "..", "testdata", "samplepkg")
	identifiers, err := collect.FromPath(root)

	if err != nil {
		testingContext.Fatalf("collect.FromPath returned an error: %v", err)
	}

	result, err := analyze.Run(identifiers, analyze.Options{})

	if err != nil {
		testingContext.Fatalf("analyze.Run returned an error: %v", err)
	}

	if len(result.Violations) == 0 {
		testingContext.Fatalf("Expected at least one violation, got none.")
	}

	mustContainViolation(testingContext, result, "ctx")
	mustContainViolation(testingContext, result, "userNum")
	mustContainViolation(testingContext, result, "usr")
}

// TestAnalyzeFlagsTechnicalTermsNotInDictionary expects identifiers built
// from technical terms absent from the fixture dictionary to be flagged.
func TestAnalyzeFlagsTechnicalTermsNotInDictionary(testingContext *testing.T) {
	testingContext.Setenv("KIVIA_DICTIONARY_PATH", dictionaryPathForTests(testingContext))

	identifiers := []collect.Identifier{
		{Name: "userID", Kind: "variable"},
		{Name: "httpClient", Kind: "variable"},
	}
	result, err := analyze.Run(identifiers, analyze.Options{})

	if err != nil {
		testingContext.Fatalf("analyze.Run returned an error: %v", err)
	}

	if len(result.Violations) == 0 {
		testingContext.Fatalf("Expected violations, got none.")
	}

	mustContainViolation(testingContext, result, "userID")
	mustContainViolation(testingContext, result, "httpClient")
}

// TestAnalyzeDoesNotFlagNormalDictionaryWords expects identifiers composed
// of plain dictionary words to pass without violations.
func TestAnalyzeDoesNotFlagNormalDictionaryWords(testingContext *testing.T) {
	testingContext.Setenv("KIVIA_DICTIONARY_PATH", dictionaryPathForTests(testingContext))

	identifiers := []collect.Identifier{
		{Name: "options", Kind: "variable"},
		{Name: "parsedResource", Kind: "variable"},
		{Name: "hasResources", Kind: "variable"},
		{Name: "allowlist", Kind: "variable"},
	}
	result, err := analyze.Run(identifiers, analyze.Options{})

	if err != nil {
		testingContext.Fatalf("analyze.Run returned an error: %v", err)
	}

	if len(result.Violations) != 0 {
		testingContext.Fatalf("Expected no violations, got %d.", len(result.Violations))
	}
}

// TestAnalyzeMinEvaluationLengthSkipsSingleLetterIdentifiers verifies that a
// minimum evaluation length of 2 exempts one-letter names while still
// flagging longer abbreviations such as ctx.
func TestAnalyzeMinEvaluationLengthSkipsSingleLetterIdentifiers(testingContext *testing.T) {
	testingContext.Setenv("KIVIA_DICTIONARY_PATH", dictionaryPathForTests(testingContext))

	identifiers := []collect.Identifier{
		{Name: "t", Kind: "parameter"},
		{Name: "v", Kind: "receiver"},
		{Name: "ctx", Kind: "parameter"},
	}
	result, err := analyze.Run(identifiers, analyze.Options{
		MinEvaluationLength: 2,
	})

	if err != nil {
		testingContext.Fatalf("analyze.Run returned an error: %v", err)
	}

	if len(result.Violations) != 1 {
		testingContext.Fatalf("Expected one violation, got %d.", len(result.Violations))
	}

	if result.Violations[0].Identifier.Name != "ctx" {
		testingContext.Fatalf("Expected only ctx to be evaluated, got %q.", result.Violations[0].Identifier.Name)
	}
}

// TestAnalyzeFlagsExpressionAbbreviation expects "expr" to be flagged even at
// the smallest minimum evaluation length.
func TestAnalyzeFlagsExpressionAbbreviation(testingContext *testing.T) {
	testingContext.Setenv("KIVIA_DICTIONARY_PATH", dictionaryPathForTests(testingContext))

	identifiers := []collect.Identifier{
		{Name: "expr", Kind: "variable"},
	}
	result, err := analyze.Run(identifiers, analyze.Options{
		MinEvaluationLength: 1,
	})

	if err != nil {
		testingContext.Fatalf("analyze.Run returned an error: %v", err)
	}

	if len(result.Violations) != 1 {
		testingContext.Fatalf("Expected one violation, got %d.", len(result.Violations))
	}

	if result.Violations[0].Identifier.Name != "expr" {
		testingContext.Fatalf("Expected expr to be flagged, got %q.", result.Violations[0].Identifier.Name)
	}
}

// TestAnalyzeAllowsUpperCaseTokens expects fully upper-cased identifiers
// such as JSON to be accepted by the upper-case token allowance.
func TestAnalyzeAllowsUpperCaseTokens(testingContext *testing.T) {
	testingContext.Setenv("KIVIA_DICTIONARY_PATH", dictionaryPathForTests(testingContext))

	identifiers := []collect.Identifier{
		{Name: "JSON", Kind: "variable"},
	}
	result, err := analyze.Run(identifiers, analyze.Options{})

	if err != nil {
		testingContext.Fatalf("analyze.Run returned an error: %v", err)
	}

	if len(result.Violations) != 0 {
		testingContext.Fatalf("Expected no violations, got %d.", len(result.Violations))
	}
}

// TestAnalyzeFailsWhenDictionaryIsUnavailable points the analyzer at an
// effectively empty dictionary file and expects Run to return an error
// rather than silently analysing with no words.
func TestAnalyzeFailsWhenDictionaryIsUnavailable(testingContext *testing.T) {
	emptyDictionaryPath := filepath.Join(testingContext.TempDir(), "empty.txt")

	if err := os.WriteFile(emptyDictionaryPath, []byte("\n"), 0o644); err != nil {
		testingContext.Fatalf("os.WriteFile returned an error: %v", err)
	}

	testingContext.Setenv("KIVIA_DICTIONARY_PATH", emptyDictionaryPath)

	_, err := analyze.Run([]collect.Identifier{{Name: "ctx", Kind: "parameter"}}, analyze.Options{})

	if err == nil {
		testingContext.Fatalf("Expected analyze.Run to fail when dictionary data is unavailable.")
	}
}

// mustContainViolation fails the test unless some violation in result names
// the given identifier.
func mustContainViolation(testingContext *testing.T, result analyze.Result, name string) {
	testingContext.Helper()

	for _, violation := range result.Violations {
		if violation.Identifier.Name == name {
			return
		}
	}

	testingContext.Fatalf("Expected a violation for %q.", name)
}
+26
internal/analyze/resources.go
··· 1 + package analyze 2 + 3 + import ( 4 + "fmt" 5 + "github.com/Fuwn/kivia/internal/nlp" 6 + ) 7 + 8 + type resources struct { 9 + dictionary *nlp.Dictionary 10 + } 11 + 12 + func getResources() (resources, error) { 13 + return loadResources() 14 + } 15 + 16 + func loadResources() (resources, error) { 17 + dictionary, err := nlp.NewDictionary() 18 + 19 + if err != nil { 20 + return resources{}, fmt.Errorf("Failed to load dictionary: %w", err) 21 + } 22 + 23 + return resources{ 24 + dictionary: dictionary, 25 + }, nil 26 + }
+331
internal/collect/collect.go
// Package collect parses Go source with the standard library AST and extracts
// every named identifier together with positional and usage context.
package collect

import (
	"bytes"
	"fmt"
	"go/ast"
	"go/parser"
	"go/printer"
	"go/token"
	"io/fs"
	"os"
	"path/filepath"
	"sort"
	"strings"
)

// Context carries optional usage information attached to an identifier.
type Context struct {
	EnclosingFunction string `json:"enclosingFunction,omitempty"` // innermost function name at the point of declaration
	Type              string `json:"type,omitempty"`              // declared or inferred type, rendered as source text
	ValueExpression   string `json:"valueExpression,omitempty"`   // right-hand side of the declaring assignment, if any
	ParentType        string `json:"parentType,omitempty"`        // owning struct/interface for fields and interface methods
}

// Identifier is one extracted name with its kind, file position, and context.
type Identifier struct {
	Name    string  `json:"name"`
	Kind    string  `json:"kind"`
	File    string  `json:"file"`
	Line    int     `json:"line"`
	Column  int     `json:"column"`
	Context Context `json:"context"`
}

// FromPath discovers Go files under path (see discoverFiles for the /...
// recursion convention), parses each, and returns all identifiers collected
// by walking the ASTs. The first parse failure aborts the whole collection.
func FromPath(path string) ([]Identifier, error) {
	files, err := discoverFiles(path)

	if err != nil {
		return nil, err
	}

	fileSet := token.NewFileSet()
	identifiers := make([]Identifier, 0, 128)

	for _, filePath := range files {
		// SkipObjectResolution: only the syntax tree is needed, not bindings.
		fileNode, parseErr := parser.ParseFile(fileSet, filePath, nil, parser.SkipObjectResolution)

		if parseErr != nil {
			return nil, fmt.Errorf("Failed to parse %s: %w", filePath, parseErr)
		}

		collector := visitor{
			fileSet: fileSet,
			file:    filePath,
		}

		ast.Walk(&collector, fileNode)

		identifiers = append(identifiers, collector.identifiers...)
	}

	return identifiers, nil
}

// visitor walks a single file's AST, accumulating identifiers and tracking
// the enclosing function/type via explicit stacks.
type visitor struct {
	fileSet       *token.FileSet
	file          string
	identifiers   []Identifier
	functionStack []string // names of enclosing functions, innermost last
	typeStack     []string // names of enclosing type declarations, innermost last
}

// Visit implements ast.Visitor. Function and type declarations push onto the
// scope stacks and return a scopeExit sentinel that pops them when the walk
// leaves the node; value specs, short assignments, and range statements
// record the identifiers they introduce.
func (identifierVisitor *visitor) Visit(node ast.Node) ast.Visitor {
	switch typedNode := node.(type) {
	case *ast.FuncDecl:
		identifierVisitor.addIdentifier(typedNode.Name, "function", Context{})

		identifierVisitor.functionStack = append(identifierVisitor.functionStack, typedNode.Name.Name)

		identifierVisitor.captureFieldList(typedNode.Recv, "receiver")
		identifierVisitor.captureFieldList(typedNode.Type.Params, "parameter")
		identifierVisitor.captureFieldList(typedNode.Type.Results, "result")

		return leaveScope(identifierVisitor, func() {
			identifierVisitor.functionStack = identifierVisitor.functionStack[:len(identifierVisitor.functionStack)-1]
		})
	case *ast.TypeSpec:
		identifierVisitor.addIdentifier(typedNode.Name, "type", Context{})

		identifierVisitor.typeStack = append(identifierVisitor.typeStack, typedNode.Name.Name)

		identifierVisitor.captureTypeMembers(typedNode.Name.Name, typedNode.Type)

		return leaveScope(identifierVisitor, func() { identifierVisitor.typeStack = identifierVisitor.typeStack[:len(identifierVisitor.typeStack)-1] })
	case *ast.ValueSpec:
		// var/const declarations: every name shares the declared type and the
		// rendered right-hand values.
		// renderExpression/renderExpressionList are presumably source-text
		// pretty-printers defined later in this file — not visible here.
		declaredType := renderExpression(identifierVisitor.fileSet, typedNode.Type)
		rightHandValue := renderExpressionList(identifierVisitor.fileSet, typedNode.Values)

		for _, name := range typedNode.Names {
			identifierVisitor.addIdentifier(name, "variable", Context{Type: declaredType, ValueExpression: rightHandValue})
		}
	case *ast.AssignStmt:
		// Only := definitions introduce identifiers; plain = does not.
		if typedNode.Tok != token.DEFINE {
			break
		}

		rightHandValue := renderExpressionList(identifierVisitor.fileSet, typedNode.Rhs)

		for index, left := range typedNode.Lhs {
			identifierNode, ok := left.(*ast.Ident)

			if !ok {
				continue
			}

			assignmentContext := Context{ValueExpression: rightHandValue}

			// Positional pairing only holds when LHS and RHS counts line up;
			// a multi-value call on the RHS leaves Type empty for extras.
			if index < len(typedNode.Rhs) {
				assignmentContext.Type = inferTypeFromExpression(typedNode.Rhs[index])
			}

			identifierVisitor.addIdentifier(identifierNode, "variable", assignmentContext)
		}
	case *ast.RangeStmt:
		if typedNode.Tok != token.DEFINE {
			break
		}

		if keyIdentifier, ok := typedNode.Key.(*ast.Ident); ok {
			identifierVisitor.addIdentifier(keyIdentifier, "rangeKey", Context{ValueExpression: renderExpression(identifierVisitor.fileSet, typedNode.X)})
		}

		if valueIdentifier, ok := typedNode.Value.(*ast.Ident); ok {
			identifierVisitor.addIdentifier(valueIdentifier, "rangeValue", Context{ValueExpression: renderExpression(identifierVisitor.fileSet, typedNode.X)})
		}
	}

	return identifierVisitor
}

// scopeExit is the sentinel visitor returned when entering a scoped node:
// ast.Walk calls Visit(nil) when it finishes that node's children, which
// triggers onLeave (popping the corresponding stack).
type scopeExit struct {
	parent  *visitor
	onLeave func()
}

// leaveScope wraps parent so that onLeave runs exactly once when the walk
// leaves the current node.
func leaveScope(parent *visitor, onLeave func()) ast.Visitor {
	return &scopeExit{parent: parent, onLeave: onLeave}
}

// Visit implements ast.Visitor for the sentinel: nil marks the end of the
// scoped node's children; any other node is delegated to the parent visitor.
func (scopeExitVisitor *scopeExit) Visit(node ast.Node) ast.Visitor {
	if node == nil {
		scopeExitVisitor.onLeave()

		return nil
	}

	return scopeExitVisitor.parent
}

// captureFieldList records every named entry of a receiver/parameter/result
// list under the given kind. Anonymous (unnamed) fields contribute nothing.
func (identifierVisitor *visitor) captureFieldList(fields *ast.FieldList, kind string) {
	if fields == nil {
		return
	}

	for _, field := range fields.List {
		declaredType := renderExpression(identifierVisitor.fileSet, field.Type)

		for _, name := range field.Names {
			identifierVisitor.addIdentifier(name, kind, Context{Type: declaredType})
		}
	}
}

// captureTypeMembers records struct fields and interface methods declared by
// a type, tagging each with the owning type name. Other type forms (aliases,
// slices, maps, ...) declare no members and are ignored.
func (identifierVisitor *visitor) captureTypeMembers(typeName string, typeExpression ast.Expr) {
	switch typedType := typeExpression.(type) {
	case *ast.StructType:
		if typedType.Fields == nil {
			return
		}

		for _, field := range typedType.Fields.List {
			memberType := renderExpression(identifierVisitor.fileSet, field.Type)

			for _, fieldName := range field.Names {
				identifierVisitor.addIdentifier(fieldName, "field", Context{Type: memberType, ParentType: typeName})
			}
		}
	case *ast.InterfaceType:
		if typedType.Methods == nil {
			return
		}

		for _, method := range typedType.Methods.List {
			memberType := renderExpression(identifierVisitor.fileSet, method.Type)

			// Embedded interfaces have no Names and are skipped here.
			for _, methodName := range method.Names {
				identifierVisitor.addIdentifier(methodName, "interfaceMethod", Context{Type: memberType, ParentType: typeName})
			}
		}
	}
}

// addIdentifier appends one identifier record, resolving its file position
// and stamping the current enclosing function. The blank identifier "_" is
// never recorded.
func (identifierVisitor *visitor) addIdentifier(identifier *ast.Ident, kind string, context Context) {
	if identifier == nil || identifier.Name == "_" {
		return
	}

	position := identifierVisitor.fileSet.Position(identifier.NamePos)
	context.EnclosingFunction = currentFunction(identifierVisitor.functionStack)
	identifierVisitor.identifiers = append(identifierVisitor.identifiers, Identifier{
		Name:    identifier.Name,
		Kind:    kind,
		File:    identifierVisitor.file,
		Line:    position.Line,
		Column:  position.Column,
		Context: context,
	})
}

// currentFunction returns the innermost enclosing function name, or "" at
// file scope.
func currentFunction(stack []string) string {
	if len(stack) == 0 {
		return ""
	}

	return stack[len(stack)-1]
}

// discoverFiles resolves path into the list of Go files to parse. A trailing
// "/..." requests recursive discovery from the prefix; an empty root falls
// back to the current directory.
func discoverFiles(path string) ([]string, error) {
	searchRoot := path
	recursive := false

	if strings.HasSuffix(path, "/...") {
		searchRoot = strings.TrimSuffix(path, "/...")
		recursive = true
	}

	if searchRoot == "" {
		searchRoot = "."
237 + } 238 + 239 + pathFileDetails, err := os.Stat(searchRoot) 240 + 241 + if err != nil { 242 + return nil, err 243 + } 244 + 245 + if !pathFileDetails.IsDir() { 246 + if strings.HasSuffix(searchRoot, ".go") { 247 + return []string{searchRoot}, nil 248 + } 249 + 250 + return nil, fmt.Errorf("Path %q is not a Go file.", searchRoot) 251 + } 252 + 253 + files := make([]string, 0, 64) 254 + walkErr := filepath.WalkDir(searchRoot, func(candidate string, entry fs.DirEntry, walkError error) error { 255 + if walkError != nil { 256 + return walkError 257 + } 258 + 259 + if entry.IsDir() { 260 + name := entry.Name() 261 + 262 + if name == ".git" || name == "vendor" || name == "node_modules" { 263 + return filepath.SkipDir 264 + } 265 + 266 + if !recursive && candidate != searchRoot { 267 + return filepath.SkipDir 268 + } 269 + 270 + return nil 271 + } 272 + 273 + if strings.HasSuffix(candidate, ".go") { 274 + files = append(files, candidate) 275 + } 276 + 277 + return nil 278 + }) 279 + 280 + if walkErr != nil { 281 + return nil, walkErr 282 + } 283 + 284 + sort.Strings(files) 285 + 286 + return files, nil 287 + } 288 + 289 + func renderExpression(fileSet *token.FileSet, expression ast.Expr) string { 290 + if expression == nil { 291 + return "" 292 + } 293 + 294 + var buffer bytes.Buffer 295 + 296 + if err := printer.Fprint(&buffer, fileSet, expression); err != nil { 297 + return "" 298 + } 299 + 300 + return buffer.String() 301 + } 302 + 303 + func renderExpressionList(fileSet *token.FileSet, expressions []ast.Expr) string { 304 + if len(expressions) == 0 { 305 + return "" 306 + } 307 + 308 + parts := make([]string, 0, len(expressions)) 309 + 310 + for _, expression := range expressions { 311 + parts = append(parts, renderExpression(fileSet, expression)) 312 + } 313 + 314 + return strings.Join(parts, ", ") 315 + } 316 + 317 + func inferTypeFromExpression(expression ast.Expr) string { 318 + switch typedExpression := expression.(type) { 319 + case *ast.CallExpr: 320 + 
switch functionExpression := typedExpression.Fun.(type) { 321 + case *ast.Ident: 322 + return functionExpression.Name 323 + case *ast.SelectorExpr: 324 + return functionExpression.Sel.Name 325 + } 326 + 327 + return "" 328 + default: 329 + return "" 330 + } 331 + }
+614
internal/nlp/dictionary.go
··· 1 + package nlp 2 + 3 + import ( 4 + "bufio" 5 + "errors" 6 + "fmt" 7 + "github.com/sajari/fuzzy" 8 + "os" 9 + "path" 10 + "path/filepath" 11 + "regexp" 12 + "sort" 13 + "strconv" 14 + "strings" 15 + "unicode/utf8" 16 + ) 17 + 18 + var wordPattern = regexp.MustCompile(`[A-Za-z]+`) 19 + 20 + type Dictionary struct { 21 + model *fuzzy.Model 22 + words map[string]struct{} 23 + wordsByFirstCharacter map[rune][]string 24 + } 25 + 26 + func NewDictionary() (*Dictionary, error) { 27 + words, err := loadWords() 28 + 29 + if err != nil { 30 + return nil, err 31 + } 32 + 33 + wordSet := makeWordSet(words) 34 + wordsByFirstCharacter := makeWordsByFirstCharacter(words) 35 + model, loadErr := loadCachedModel() 36 + 37 + if loadErr == nil { 38 + return &Dictionary{model: model, words: wordSet, wordsByFirstCharacter: wordsByFirstCharacter}, nil 39 + } 40 + 41 + model = fuzzy.NewModel() 42 + 43 + model.SetThreshold(1) 44 + model.SetDepth(1) 45 + model.SetUseAutocomplete(false) 46 + model.Train(words) 47 + 48 + _ = saveCachedModel(model) 49 + 50 + return &Dictionary{model: model, words: wordSet, wordsByFirstCharacter: wordsByFirstCharacter}, nil 51 + } 52 + 53 + func (dictionary *Dictionary) IsWord(token string) bool { 54 + token = normalizeToken(token) 55 + 56 + if token == "" || dictionary == nil { 57 + return false 58 + } 59 + 60 + return dictionary.isLexiconWord(token) 61 + } 62 + 63 + func (dictionary *Dictionary) Suggest(token string) string { 64 + token = normalizeToken(token) 65 + 66 + if token == "" || dictionary == nil || dictionary.model == nil { 67 + return "" 68 + } 69 + 70 + if dictionary.isLexiconWord(token) { 71 + return "" 72 + } 73 + 74 + suggestions := dictionary.model.SpellCheckSuggestions(token, 1) 75 + 76 + if len(suggestions) == 0 { 77 + return "" 78 + } 79 + 80 + if suggestions[0] == token { 81 + return "" 82 + } 83 + 84 + return suggestions[0] 85 + } 86 + 87 + func (dictionary *Dictionary) isLexiconWord(token string) bool { 88 + if dictionary == nil { 
89 + return false 90 + } 91 + 92 + if _, ok := dictionary.words[token]; ok { 93 + return true 94 + } 95 + 96 + candidates := make([]string, 0, 16) 97 + candidates = append(candidates, inflectionCandidates(token)...) 98 + candidates = append(candidates, spellingVariantCandidates(token)...) 99 + 100 + for _, candidate := range inflectionCandidates(token) { 101 + candidates = append(candidates, spellingVariantCandidates(candidate)...) 102 + } 103 + 104 + uniqueCandidates := make(map[string]struct{}, len(candidates)) 105 + 106 + for _, candidate := range candidates { 107 + if candidate == "" || candidate == token { 108 + continue 109 + } 110 + 111 + if _, seen := uniqueCandidates[candidate]; seen { 112 + continue 113 + } 114 + 115 + uniqueCandidates[candidate] = struct{}{} 116 + 117 + if _, ok := dictionary.words[candidate]; ok { 118 + return true 119 + } 120 + } 121 + 122 + return false 123 + } 124 + 125 + func (dictionary *Dictionary) AbbreviationExpansion(token string) (string, bool) { 126 + token = normalizeToken(token) 127 + 128 + if token == "" || dictionary == nil { 129 + return "", false 130 + } 131 + 132 + tokenLength := utf8.RuneCountInString(token) 133 + 134 + if tokenLength <= 1 || tokenLength > 4 { 135 + return "", false 136 + } 137 + 138 + firstCharacter, _ := utf8.DecodeRuneInString(token) 139 + candidates := dictionary.wordsByFirstCharacter[firstCharacter] 140 + 141 + if len(candidates) == 0 { 142 + return "", false 143 + } 144 + 145 + bestCandidate := "" 146 + bestScore := 1 << 30 147 + 148 + for _, candidate := range candidates { 149 + if !isLikelyAbbreviationForToken(token, candidate) { 150 + continue 151 + } 152 + 153 + score := abbreviationScore(token, candidate) 154 + 155 + if score < bestScore { 156 + bestScore = score 157 + bestCandidate = candidate 158 + } 159 + } 160 + 161 + if bestCandidate == "" { 162 + return "", false 163 + } 164 + 165 + return bestCandidate, true 166 + } 167 + 168 + func isLikelyAbbreviationForToken(token string, 
candidate string) bool { 169 + if candidate == "" || token == "" || token == candidate { 170 + return false 171 + } 172 + 173 + tokenLength := utf8.RuneCountInString(token) 174 + candidateLength := utf8.RuneCountInString(candidate) 175 + 176 + if candidateLength <= tokenLength { 177 + return false 178 + } 179 + 180 + if !isSubsequence(token, candidate) { 181 + return false 182 + } 183 + 184 + if strings.HasPrefix(candidate, token) && tokenLength <= 4 { 185 + return true 186 + } 187 + 188 + tokenConsonants := consonantSkeleton(token) 189 + candidateConsonants := consonantSkeleton(candidate) 190 + 191 + if tokenConsonants == "" || candidateConsonants == "" { 192 + return false 193 + } 194 + 195 + if isSubsequence(tokenConsonants, candidateConsonants) && tokenLength <= 5 { 196 + return true 197 + } 198 + 199 + return false 200 + } 201 + 202 + func abbreviationScore(token string, candidate string) int { 203 + tokenLength := utf8.RuneCountInString(token) 204 + candidateLength := utf8.RuneCountInString(candidate) 205 + lengthGap := max(candidateLength-tokenLength, 0) 206 + score := lengthGap * 10 207 + 208 + if strings.HasPrefix(candidate, token) { 209 + score -= 3 210 + } 211 + 212 + return score 213 + } 214 + 215 + func isSubsequence(shorter string, longer string) bool { 216 + shorterRunes := []rune(shorter) 217 + longerRunes := []rune(longer) 218 + shorterIndex := 0 219 + 220 + for _, character := range longerRunes { 221 + if shorterIndex >= len(shorterRunes) { 222 + break 223 + } 224 + 225 + if shorterRunes[shorterIndex] == character { 226 + shorterIndex++ 227 + } 228 + } 229 + 230 + return shorterIndex == len(shorterRunes) 231 + } 232 + 233 + func consonantSkeleton(word string) string { 234 + var builder strings.Builder 235 + 236 + for _, character := range word { 237 + switch character { 238 + case 'a', 'e', 'i', 'o', 'u': 239 + continue 240 + default: 241 + builder.WriteRune(character) 242 + } 243 + } 244 + 245 + return builder.String() 246 + } 247 + 248 + func 
inflectionCandidates(token string) []string { 249 + candidates := make([]string, 0, 8) 250 + 251 + if strings.HasSuffix(token, "ies") && len(token) > 3 { 252 + candidates = append(candidates, token[:len(token)-3]+"y") 253 + } 254 + 255 + if strings.HasSuffix(token, "es") && len(token) > 2 { 256 + candidates = append(candidates, token[:len(token)-2]) 257 + } 258 + 259 + if strings.HasSuffix(token, "s") && len(token) > 1 { 260 + candidates = append(candidates, token[:len(token)-1]) 261 + } 262 + 263 + if strings.HasSuffix(token, "ed") && len(token) > 2 { 264 + candidateWithoutSuffix := token[:len(token)-2] 265 + candidates = append(candidates, candidateWithoutSuffix) 266 + candidates = append(candidates, candidateWithoutSuffix+"e") 267 + 268 + if len(candidateWithoutSuffix) >= 2 { 269 + lastCharacter := candidateWithoutSuffix[len(candidateWithoutSuffix)-1] 270 + secondToLastCharacter := candidateWithoutSuffix[len(candidateWithoutSuffix)-2] 271 + 272 + if lastCharacter == secondToLastCharacter { 273 + candidates = append(candidates, candidateWithoutSuffix[:len(candidateWithoutSuffix)-1]) 274 + } 275 + } 276 + } 277 + 278 + if strings.HasSuffix(token, "ing") && len(token) > 3 { 279 + candidateWithoutSuffix := token[:len(token)-3] 280 + candidates = append(candidates, candidateWithoutSuffix) 281 + candidates = append(candidates, candidateWithoutSuffix+"e") 282 + } 283 + 284 + if strings.HasSuffix(token, "er") && len(token) > 2 { 285 + candidateWithoutSuffix := token[:len(token)-2] 286 + candidates = append(candidates, candidateWithoutSuffix) 287 + candidates = append(candidates, candidateWithoutSuffix+"e") 288 + 289 + if len(candidateWithoutSuffix) >= 2 { 290 + lastCharacter := candidateWithoutSuffix[len(candidateWithoutSuffix)-1] 291 + secondToLastCharacter := candidateWithoutSuffix[len(candidateWithoutSuffix)-2] 292 + 293 + if lastCharacter == secondToLastCharacter { 294 + candidates = append(candidates, candidateWithoutSuffix[:len(candidateWithoutSuffix)-1]) 295 + } 
296 + } 297 + } 298 + 299 + if strings.HasSuffix(token, "ize") && len(token) > 3 { 300 + candidates = append(candidates, token[:len(token)-3]) 301 + } 302 + 303 + if strings.HasSuffix(token, "ized") && len(token) > 4 { 304 + candidates = append(candidates, token[:len(token)-4]) 305 + } 306 + 307 + if strings.HasSuffix(token, "izing") && len(token) > 5 { 308 + candidates = append(candidates, token[:len(token)-5]) 309 + } 310 + 311 + if strings.HasSuffix(token, "izer") && len(token) > 4 { 312 + candidates = append(candidates, token[:len(token)-4]) 313 + } 314 + 315 + if strings.HasSuffix(token, "ization") && len(token) > 7 { 316 + candidates = append(candidates, token[:len(token)-7]) 317 + } 318 + 319 + return candidates 320 + } 321 + 322 + func spellingVariantCandidates(token string) []string { 323 + candidates := make([]string, 0, 8) 324 + 325 + appendSuffixVariant(&candidates, token, "isation", "ization") 326 + appendSuffixVariant(&candidates, token, "ization", "isation") 327 + appendSuffixVariant(&candidates, token, "ising", "izing") 328 + appendSuffixVariant(&candidates, token, "izing", "ising") 329 + appendSuffixVariant(&candidates, token, "ised", "ized") 330 + appendSuffixVariant(&candidates, token, "ized", "ised") 331 + appendSuffixVariant(&candidates, token, "iser", "izer") 332 + appendSuffixVariant(&candidates, token, "izer", "iser") 333 + appendSuffixVariant(&candidates, token, "ise", "ize") 334 + appendSuffixVariant(&candidates, token, "ize", "ise") 335 + appendSuffixVariant(&candidates, token, "our", "or") 336 + appendSuffixVariant(&candidates, token, "or", "our") 337 + appendSuffixVariant(&candidates, token, "tre", "ter") 338 + appendSuffixVariant(&candidates, token, "ter", "tre") 339 + 340 + return candidates 341 + } 342 + 343 + func appendSuffixVariant(candidates *[]string, token string, fromSuffix string, toSuffix string) { 344 + if !strings.HasSuffix(token, fromSuffix) || len(token) <= len(fromSuffix) { 345 + return 346 + } 347 + 348 + root := 
token[:len(token)-len(fromSuffix)] 349 + *candidates = append(*candidates, root+toSuffix) 350 + } 351 + 352 + func makeWordSet(words []string) map[string]struct{} { 353 + set := make(map[string]struct{}, len(words)) 354 + 355 + for _, word := range words { 356 + set[word] = struct{}{} 357 + } 358 + 359 + return set 360 + } 361 + 362 + func makeWordsByFirstCharacter(words []string) map[rune][]string { 363 + grouped := make(map[rune][]string) 364 + 365 + for _, word := range words { 366 + firstCharacter, size := utf8.DecodeRuneInString(word) 367 + 368 + if firstCharacter == utf8.RuneError && size == 0 { 369 + continue 370 + } 371 + 372 + grouped[firstCharacter] = append(grouped[firstCharacter], word) 373 + } 374 + 375 + for firstCharacter := range grouped { 376 + sort.Strings(grouped[firstCharacter]) 377 + } 378 + 379 + return grouped 380 + } 381 + 382 + func loadWords() ([]string, error) { 383 + configuredDictionaryPaths := parseDictionaryPaths(os.Getenv("KIVIA_DICTIONARY_PATH")) 384 + 385 + if len(configuredDictionaryPaths) > 0 { 386 + words, err := loadWordsFromPaths(configuredDictionaryPaths, true) 387 + 388 + if err != nil { 389 + return nil, err 390 + } 391 + 392 + if len(words) == 0 { 393 + return nil, errors.New("configured dictionary sources contain no usable words") 394 + } 395 + 396 + return words, nil 397 + } 398 + 399 + words, err := loadWordsFromPaths(defaultDictionaryPaths, false) 400 + 401 + if err != nil { 402 + return nil, err 403 + } 404 + 405 + if len(words) == 0 { 406 + return nil, errors.New("no usable dictionary words found; set KIVIA_DICTIONARY_PATH") 407 + } 408 + 409 + return words, nil 410 + } 411 + 412 + func readWordsFromFile(filePath string) ([]string, error) { 413 + file, err := os.Open(filePath) 414 + 415 + if err != nil { 416 + return nil, err 417 + } 418 + 419 + defer file.Close() 420 + 421 + words := make([]string, 0, 1024) 422 + scanner := bufio.NewScanner(file) 423 + isSpellDictionaryFile := strings.EqualFold(path.Ext(filePath), 
".dic") 424 + lineNumber := 0 425 + 426 + for scanner.Scan() { 427 + lineNumber++ 428 + 429 + line := normalizeDictionaryLine(scanner.Text(), lineNumber, isSpellDictionaryFile) 430 + 431 + if line == "" { 432 + continue 433 + } 434 + 435 + words = append(words, line) 436 + } 437 + 438 + if err := scanner.Err(); err != nil { 439 + return nil, err 440 + } 441 + 442 + return normalizeWords(words), nil 443 + } 444 + 445 + func parseDictionaryPaths(value string) []string { 446 + trimmedValue := strings.TrimSpace(value) 447 + 448 + if trimmedValue == "" { 449 + return nil 450 + } 451 + 452 + expandedValue := strings.ReplaceAll(trimmedValue, ",", string(os.PathListSeparator)) 453 + parts := strings.Split(expandedValue, string(os.PathListSeparator)) 454 + paths := make([]string, 0, len(parts)) 455 + 456 + for _, entry := range parts { 457 + candidate := strings.TrimSpace(entry) 458 + 459 + if candidate == "" { 460 + continue 461 + } 462 + 463 + paths = append(paths, candidate) 464 + } 465 + 466 + return paths 467 + } 468 + 469 + func loadWordsFromPaths(paths []string, strict bool) ([]string, error) { 470 + combinedWords := make([]string, 0, 4096) 471 + 472 + for _, dictionaryPath := range paths { 473 + words, err := readWordsFromFile(dictionaryPath) 474 + 475 + if err != nil { 476 + if strict { 477 + return nil, fmt.Errorf("failed to read dictionary %q: %w", dictionaryPath, err) 478 + } 479 + 480 + continue 481 + } 482 + 483 + combinedWords = append(combinedWords, words...) 
484 + } 485 + 486 + return normalizeWords(combinedWords), nil 487 + } 488 + 489 + func normalizeDictionaryLine(line string, lineNumber int, isSpellDictionaryFile bool) string { 490 + trimmedLine := strings.TrimSpace(line) 491 + 492 + if trimmedLine == "" || strings.HasPrefix(trimmedLine, "#") { 493 + return "" 494 + } 495 + 496 + if isSpellDictionaryFile && lineNumber == 1 { 497 + if _, err := strconv.Atoi(trimmedLine); err == nil { 498 + return "" 499 + } 500 + } 501 + 502 + if slashIndex := strings.Index(trimmedLine, "/"); slashIndex >= 0 { 503 + trimmedLine = trimmedLine[:slashIndex] 504 + } 505 + 506 + return trimmedLine 507 + } 508 + 509 + func normalizeWords(words []string) []string { 510 + unique := make(map[string]struct{}, len(words)) 511 + 512 + for _, word := range words { 513 + normalized := normalizeToken(word) 514 + 515 + if normalized == "" { 516 + continue 517 + } 518 + 519 + if len(normalized) <= 1 { 520 + continue 521 + } 522 + 523 + unique[normalized] = struct{}{} 524 + } 525 + 526 + output := make([]string, 0, len(unique)) 527 + 528 + for word := range unique { 529 + output = append(output, word) 530 + } 531 + 532 + sort.Strings(output) 533 + 534 + return output 535 + } 536 + 537 + func normalizeToken(token string) string { 538 + token = strings.ToLower(strings.TrimSpace(token)) 539 + 540 + if token == "" { 541 + return "" 542 + } 543 + 544 + match := wordPattern.FindString(token) 545 + 546 + if match == "" { 547 + return "" 548 + } 549 + 550 + return match 551 + } 552 + 553 + func cachePath() (string, error) { 554 + base, err := os.UserCacheDir() 555 + 556 + if err != nil { 557 + return "", err 558 + } 559 + 560 + return filepath.Join(base, "kivia", "fuzzy_model_v1.json"), nil 561 + } 562 + 563 + func loadCachedModel() (*fuzzy.Model, error) { 564 + path, err := cachePath() 565 + 566 + if err != nil { 567 + return nil, err 568 + } 569 + 570 + model, err := fuzzy.Load(path) 571 + 572 + if err != nil { 573 + return nil, err 574 + } 575 + 576 + 
return model, nil 577 + } 578 + 579 + func saveCachedModel(model *fuzzy.Model) error { 580 + if model == nil { 581 + return errors.New("Model cannot be nil.") 582 + } 583 + 584 + path, err := cachePath() 585 + 586 + if err != nil { 587 + return err 588 + } 589 + 590 + if err := os.MkdirAll(filepath.Dir(path), 0o755); err != nil { 591 + return err 592 + } 593 + 594 + return model.Save(path) 595 + } 596 + 597 + var defaultDictionaryPaths = []string{ 598 + "/usr/share/dict/words", 599 + "/usr/dict/words", 600 + "/usr/share/dict/web2", 601 + "/usr/share/dict/web2a", 602 + "/usr/share/dict/propernames", 603 + "/usr/share/dict/connectives", 604 + "/usr/share/hunspell/en_US.dic", 605 + "/usr/share/hunspell/en_GB.dic", 606 + "/usr/share/hunspell/en_CA.dic", 607 + "/usr/share/hunspell/en_AU.dic", 608 + "/usr/share/myspell/en_US.dic", 609 + "/usr/share/myspell/en_GB.dic", 610 + "/opt/homebrew/share/hunspell/en_US.dic", 611 + "/opt/homebrew/share/hunspell/en_GB.dic", 612 + "/usr/local/share/hunspell/en_US.dic", 613 + "/usr/local/share/hunspell/en_GB.dic", 614 + }
+165
internal/nlp/dictionary_test.go
// Package nlp_test exercises the dictionary via its public API, driving
// word sources through the KIVIA_DICTIONARY_PATH environment variable.
package nlp_test

import (
	"github.com/Fuwn/kivia/internal/nlp"
	"os"
	"path/filepath"
	"strings"
	"testing"
)

// TestDictionaryRecognizesLexiconWords checks exact matches and plural
// inflection against the checked-in testdata word list.
func TestDictionaryRecognizesLexiconWords(testingContext *testing.T) {
	dictionaryFile := filepath.Join("..", "..", "testdata", "dictionary", "words.txt")

	// Setenv restores the previous value automatically after the test.
	testingContext.Setenv("KIVIA_DICTIONARY_PATH", dictionaryFile)

	dictionary, err := nlp.NewDictionary()

	if err != nil {
		testingContext.Fatalf("NewDictionary returned an error: %v", err)
	}

	if !dictionary.IsWord("options") {
		testingContext.Fatalf("Expected options to be recognized.")
	}

	if !dictionary.IsWord("has") {
		testingContext.Fatalf("Expected has to be recognized.")
	}

	if !dictionary.IsWord("resources") {
		testingContext.Fatalf("Expected resources to be recognized through plural inflection.")
	}
}

// TestDictionaryFindsAbbreviationExpansions verifies that common Go
// abbreviations expand to the expected full words.
func TestDictionaryFindsAbbreviationExpansions(testingContext *testing.T) {
	dictionaryFile := filepath.Join("..", "..", "testdata", "dictionary", "words.txt")

	testingContext.Setenv("KIVIA_DICTIONARY_PATH", dictionaryFile)

	dictionary, err := nlp.NewDictionary()

	if err != nil {
		testingContext.Fatalf("NewDictionary returned an error: %v", err)
	}

	// token -> expected expansion.
	cases := map[string]string{
		"expr": "expression",
		"ctx":  "context",
		"err":  "error",
	}

	for token, expectedExpansion := range cases {
		expansion, ok := dictionary.AbbreviationExpansion(token)

		if !ok {
			testingContext.Fatalf("Expected an abbreviation expansion for %q.", token)
		}

		if expansion != expectedExpansion {
			testingContext.Fatalf("Expected %q to expand to %q, got %q.", token, expectedExpansion, expansion)
		}
	}
}

// TestDictionaryLoadsFromMultipleDictionaryFiles confirms that the
// OS path-list separator joins multiple dictionary sources.
func TestDictionaryLoadsFromMultipleDictionaryFiles(testingContext *testing.T) {
	tempDirectory := testingContext.TempDir()
	firstDictionaryPath := filepath.Join(tempDirectory, "first.txt")
	secondDictionaryPath := filepath.Join(tempDirectory, "second.txt")
	combinedPathList := strings.Join([]string{firstDictionaryPath, secondDictionaryPath}, string(os.PathListSeparator))

	if err := os.WriteFile(firstDictionaryPath, []byte("alpha\n"), 0o644); err != nil {
		testingContext.Fatalf("os.WriteFile returned an error: %v", err)
	}

	if err := os.WriteFile(secondDictionaryPath, []byte("beta\n"), 0o644); err != nil {
		testingContext.Fatalf("os.WriteFile returned an error: %v", err)
	}

	testingContext.Setenv("KIVIA_DICTIONARY_PATH", combinedPathList)

	dictionary, err := nlp.NewDictionary()

	if err != nil {
		testingContext.Fatalf("NewDictionary returned an error: %v", err)
	}

	if !dictionary.IsWord("alpha") {
		testingContext.Fatalf("Expected alpha to be recognized.")
	}

	if !dictionary.IsWord("beta") {
		testingContext.Fatalf("Expected beta to be recognized.")
	}
}

// TestDictionaryFailsWhenConfiguredPathHasNoWords checks the strict-mode
// error path: an explicitly configured but empty dictionary must fail.
func TestDictionaryFailsWhenConfiguredPathHasNoWords(testingContext *testing.T) {
	tempDirectory := testingContext.TempDir()
	emptyDictionaryPath := filepath.Join(tempDirectory, "empty.txt")

	if err := os.WriteFile(emptyDictionaryPath, []byte("\n"), 0o644); err != nil {
		testingContext.Fatalf("os.WriteFile returned an error: %v", err)
	}

	testingContext.Setenv("KIVIA_DICTIONARY_PATH", emptyDictionaryPath)

	_, err := nlp.NewDictionary()

	if err == nil {
		testingContext.Fatalf("Expected NewDictionary to fail when configured dictionary has no usable words.")
	}
}

// TestDictionaryRecognizesDerivedForms covers suffix inflections:
// doubled-consonant past tense and the -ize derivation.
func TestDictionaryRecognizesDerivedForms(testingContext *testing.T) {
	tempDirectory := testingContext.TempDir()
	dictionaryPath := filepath.Join(tempDirectory, "base_words.txt")

	if err := os.WriteFile(dictionaryPath, []byte("trim\ntoken\n"), 0o644); err != nil {
		testingContext.Fatalf("os.WriteFile returned an error: %v", err)
	}

	testingContext.Setenv("KIVIA_DICTIONARY_PATH", dictionaryPath)

	dictionary, err := nlp.NewDictionary()

	if err != nil {
		testingContext.Fatalf("NewDictionary returned an error: %v", err)
	}

	if !dictionary.IsWord("trimmed") {
		testingContext.Fatalf("Expected trimmed to be recognized from trim.")
	}

	if !dictionary.IsWord("tokenize") {
		testingContext.Fatalf("Expected tokenize to be recognized from token.")
	}
}

// TestDictionaryRecognizesBritishAndAmericanVariants covers the
// -ise/-ize, -our/-or, and -tre/-ter spelling-variant mappings.
func TestDictionaryRecognizesBritishAndAmericanVariants(testingContext *testing.T) {
	tempDirectory := testingContext.TempDir()
	dictionaryPath := filepath.Join(tempDirectory, "british_words.txt")

	if err := os.WriteFile(dictionaryPath, []byte("normalise\ncolour\ncentre\n"), 0o644); err != nil {
		testingContext.Fatalf("os.WriteFile returned an error: %v", err)
	}

	testingContext.Setenv("KIVIA_DICTIONARY_PATH", dictionaryPath)

	dictionary, err := nlp.NewDictionary()

	if err != nil {
		testingContext.Fatalf("NewDictionary returned an error: %v", err)
	}

	if !dictionary.IsWord("normalize") {
		testingContext.Fatalf("Expected normalize to be recognized from normalise.")
	}

	if !dictionary.IsWord("color") {
		testingContext.Fatalf("Expected color to be recognized from colour.")
	}

	if !dictionary.IsWord("center") {
		testingContext.Fatalf("Expected center to be recognized from centre.")
	}
}
+80
internal/report/report.go
··· 1 + package report 2 + 3 + import ( 4 + "encoding/json" 5 + "fmt" 6 + "github.com/Fuwn/kivia/internal/analyze" 7 + "github.com/Fuwn/kivia/internal/collect" 8 + "io" 9 + "strings" 10 + ) 11 + 12 + func Render(writer io.Writer, result analyze.Result, format string, includeContext bool) error { 13 + switch strings.ToLower(format) { 14 + case "json": 15 + return renderJSON(writer, result, includeContext) 16 + case "text", "": 17 + return renderText(writer, result, includeContext) 18 + default: 19 + return fmt.Errorf("Unsupported output format %q. Use \"text\" or \"json\".", format) 20 + } 21 + } 22 + 23 + func renderText(writer io.Writer, result analyze.Result, includeContext bool) error { 24 + if len(result.Violations) == 0 { 25 + _, err := fmt.Fprintln(writer, "No naming violations found.") 26 + 27 + return err 28 + } 29 + 30 + for _, violation := range result.Violations { 31 + if _, err := fmt.Fprintf(writer, "%s:%d:%d %s %q: %s\n", 32 + violation.Identifier.File, 33 + violation.Identifier.Line, 34 + violation.Identifier.Column, 35 + violation.Identifier.Kind, 36 + violation.Identifier.Name, 37 + violation.Reason, 38 + ); err != nil { 39 + return err 40 + } 41 + 42 + if includeContext { 43 + contextParts := make([]string, 0, 3) 44 + 45 + if violation.Identifier.Context.Type != "" { 46 + contextParts = append(contextParts, "type="+violation.Identifier.Context.Type) 47 + } 48 + 49 + if violation.Identifier.Context.ValueExpression != "" { 50 + contextParts = append(contextParts, "value="+violation.Identifier.Context.ValueExpression) 51 + } 52 + 53 + if violation.Identifier.Context.EnclosingFunction != "" { 54 + contextParts = append(contextParts, "function="+violation.Identifier.Context.EnclosingFunction) 55 + } 56 + 57 + if len(contextParts) > 0 { 58 + if _, err := fmt.Fprintf(writer, " context: %s\n", strings.Join(contextParts, ", ")); err != nil { 59 + return err 60 + } 61 + } 62 + } 63 + } 64 + 65 + return nil 66 + } 67 + 68 + func renderJSON(writer io.Writer, 
result analyze.Result, includeContext bool) error { 69 + if !includeContext { 70 + for index := range result.Violations { 71 + result.Violations[index].Identifier.Context = collect.Context{} 72 + } 73 + } 74 + 75 + encoder := json.NewEncoder(writer) 76 + 77 + encoder.SetIndent("", " ") 78 + 79 + return encoder.Encode(result) 80 + }
+20
justfile
# Recipes run under bash with fail-fast flags: -e (exit on error),
# -u (unset variables are errors), pipefail (pipelines fail early).
set shell := ["bash", "-eu", "-o", "pipefail", "-c"]

# Compile the kivia binary into ./bin.
build:
    mkdir -p bin
    go build -o ./bin/kivia .

# Format the tree; falls back to `go fmt` when iku is unavailable.
format:
    iku -w . || go fmt ./...

# Run the full test suite.
test:
    go test ./...

# Run kivia from source, forwarding any extra arguments.
run *args:
    go run . {{args}}

# Analyze kivia's own source, ignoring the testdata fixtures.
self:
    go run . --path ./... --ignore file=testdata

# Install the kivia binary into GOPATH/bin.
install:
    go install .
+196
main.go
··· 1 + package main 2 + 3 + import ( 4 + "errors" 5 + "flag" 6 + "fmt" 7 + "github.com/Fuwn/kivia/internal/analyze" 8 + "github.com/Fuwn/kivia/internal/collect" 9 + "github.com/Fuwn/kivia/internal/report" 10 + "os" 11 + "slices" 12 + "strings" 13 + ) 14 + 15 + type options struct { 16 + Path string 17 + OmitContext bool 18 + MinimumEvaluationLength int 19 + Format string 20 + FailOnViolation bool 21 + Ignore []string 22 + } 23 + 24 + func parseOptions(arguments []string) (options, error) { 25 + flagSet := flag.NewFlagSet("kivia", flag.ContinueOnError) 26 + 27 + flagSet.SetOutput(os.Stderr) 28 + 29 + var parsed options 30 + var ignoreValues stringSliceFlag 31 + 32 + flagSet.StringVar(&parsed.Path, "path", "./...", "Path to analyze (directory, file, or ./...).") 33 + flagSet.BoolVar(&parsed.OmitContext, "omit-context", false, "Hide usage context in output.") 34 + flagSet.IntVar(&parsed.MinimumEvaluationLength, "min-eval-length", 1, "Minimum identifier length in runes to evaluate.") 35 + flagSet.StringVar(&parsed.Format, "format", "text", "Output format: text or JSON.") 36 + flagSet.BoolVar(&parsed.FailOnViolation, "fail-on-violation", false, "Exit with code 1 when violations are found.") 37 + flagSet.Var(&ignoreValues, "ignore", "Ignore violations by matcher. Repeat this flag as needed. 
Prefixes: name=, kind=, file=, reason=, func=.") 38 + 39 + if err := flagSet.Parse(arguments); err != nil { 40 + return options{}, err 41 + } 42 + 43 + if parsed.MinimumEvaluationLength < 1 { 44 + return options{}, errors.New("The --min-eval-length value must be at least 1.") 45 + } 46 + 47 + parsed.Ignore = slices.Clone(ignoreValues) 48 + 49 + return parsed, nil 50 + } 51 + 52 + func run(parsed options) error { 53 + identifiers, err := collect.FromPath(parsed.Path) 54 + 55 + if err != nil { 56 + return err 57 + } 58 + 59 + result, err := analyze.Run(identifiers, analyze.Options{ 60 + MinEvaluationLength: parsed.MinimumEvaluationLength, 61 + }) 62 + 63 + if err != nil { 64 + return err 65 + } 66 + 67 + result = applyIgnoreFilters(result, parsed.Ignore) 68 + 69 + if err := report.Render(os.Stdout, result, parsed.Format, !parsed.OmitContext); err != nil { 70 + return err 71 + } 72 + 73 + if parsed.FailOnViolation && len(result.Violations) > 0 { 74 + return exitCodeError(1) 75 + } 76 + 77 + return nil 78 + } 79 + 80 + func main() { 81 + parsed, err := parseOptions(os.Args[1:]) 82 + 83 + if err != nil { 84 + fmt.Fprintf(os.Stderr, "Error: %v\n", err) 85 + os.Exit(2) 86 + } 87 + 88 + if err := run(parsed); err != nil { 89 + var codeError exitCodeError 90 + 91 + if errors.As(err, &codeError) { 92 + os.Exit(int(codeError)) 93 + } 94 + 95 + fmt.Fprintf(os.Stderr, "Error: %v\n", err) 96 + os.Exit(1) 97 + } 98 + } 99 + 100 + type exitCodeError int 101 + 102 + func (errorCode exitCodeError) Error() string { 103 + return fmt.Sprintf("Process exited with code %d.", int(errorCode)) 104 + } 105 + 106 + type stringSliceFlag []string 107 + 108 + func (values *stringSliceFlag) String() string { 109 + if values == nil { 110 + return "" 111 + } 112 + 113 + return strings.Join(*values, ",") 114 + } 115 + 116 + func (values *stringSliceFlag) Set(value string) error { 117 + trimmed := strings.TrimSpace(value) 118 + 119 + if trimmed == "" { 120 + return errors.New("Ignore matcher cannot 
be empty.") 121 + } 122 + 123 + *values = append(*values, trimmed) 124 + 125 + return nil 126 + } 127 + 128 + func applyIgnoreFilters(result analyze.Result, ignoreMatchers []string) analyze.Result { 129 + if len(ignoreMatchers) == 0 || len(result.Violations) == 0 { 130 + return result 131 + } 132 + 133 + filteredViolations := make([]analyze.Violation, 0, len(result.Violations)) 134 + 135 + for _, violation := range result.Violations { 136 + if shouldIgnoreViolation(violation, ignoreMatchers) { 137 + continue 138 + } 139 + 140 + filteredViolations = append(filteredViolations, violation) 141 + } 142 + 143 + result.Violations = filteredViolations 144 + 145 + return result 146 + } 147 + 148 + func shouldIgnoreViolation(violation analyze.Violation, ignoreMatchers []string) bool { 149 + for _, matcher := range ignoreMatchers { 150 + if matchesViolation(matcher, violation) { 151 + return true 152 + } 153 + } 154 + 155 + return false 156 + } 157 + 158 + func matchesViolation(matcher string, violation analyze.Violation) bool { 159 + normalizedMatcher := strings.ToLower(strings.TrimSpace(matcher)) 160 + 161 + if normalizedMatcher == "" { 162 + return false 163 + } 164 + 165 + identifier := violation.Identifier 166 + 167 + if strings.HasPrefix(normalizedMatcher, "name=") { 168 + return strings.Contains(strings.ToLower(identifier.Name), strings.TrimPrefix(normalizedMatcher, "name=")) 169 + } 170 + 171 + if strings.HasPrefix(normalizedMatcher, "kind=") { 172 + return strings.Contains(strings.ToLower(identifier.Kind), strings.TrimPrefix(normalizedMatcher, "kind=")) 173 + } 174 + 175 + if strings.HasPrefix(normalizedMatcher, "file=") { 176 + return strings.Contains(strings.ToLower(identifier.File), strings.TrimPrefix(normalizedMatcher, "file=")) 177 + } 178 + 179 + if strings.HasPrefix(normalizedMatcher, "reason=") { 180 + return strings.Contains(strings.ToLower(violation.Reason), strings.TrimPrefix(normalizedMatcher, "reason=")) 181 + } 182 + 183 + if 
strings.HasPrefix(normalizedMatcher, "func=") { 184 + return strings.Contains(strings.ToLower(identifier.Context.EnclosingFunction), strings.TrimPrefix(normalizedMatcher, "func=")) 185 + } 186 + 187 + composite := strings.ToLower(strings.Join([]string{ 188 + identifier.Name, 189 + identifier.Kind, 190 + identifier.File, 191 + violation.Reason, 192 + identifier.Context.EnclosingFunction, 193 + }, " ")) 194 + 195 + return strings.Contains(composite, normalizedMatcher) 196 + }
+103
main_test.go
package main

import (
	"github.com/Fuwn/kivia/internal/analyze"
	"github.com/Fuwn/kivia/internal/collect"
	"testing"
)

// TestParseOptionsDefaults verifies --min-eval-length defaults to 1.
func TestParseOptionsDefaults(testingContext *testing.T) {
	options, err := parseOptions([]string{"--path", "./testdata"})

	if err != nil {
		testingContext.Fatalf("parseOptions returned an error: %v", err)
	}

	if options.MinimumEvaluationLength != 1 {
		testingContext.Fatalf("Expected min-eval-length to default to 1, got %d.", options.MinimumEvaluationLength)
	}
}

// TestParseOptionsRejectsStrictFlag verifies that an unknown flag
// (--strict is not defined) produces a parse error.
func TestParseOptionsRejectsStrictFlag(testingContext *testing.T) {
	_, err := parseOptions([]string{"--path", "./testdata", "--strict"})

	if err == nil {
		testingContext.Fatalf("Expected parseOptions to fail when --strict is provided.")
	}
}

// TestParseOptionsReadsMultipleIgnoreFlags verifies that repeated
// --ignore flags accumulate into options.Ignore.
func TestParseOptionsReadsMultipleIgnoreFlags(testingContext *testing.T) {
	options, err := parseOptions([]string{
		"--path", "./testdata",
		"--ignore", "name=ctx",
		"--ignore", "file=_test.go",
		"--ignore", "reason=too short",
	})

	if err != nil {
		testingContext.Fatalf("parseOptions returned an error: %v", err)
	}

	if len(options.Ignore) != 3 {
		testingContext.Fatalf("Expected three ignore values, got %d.", len(options.Ignore))
	}
}

// TestParseOptionsRejectsInvalidMinimumEvaluationLength verifies the
// lower bound of 1 enforced on --min-eval-length.
func TestParseOptionsRejectsInvalidMinimumEvaluationLength(testingContext *testing.T) {
	_, err := parseOptions([]string{"--path", "./testdata", "--min-eval-length", "0"})

	if err == nil {
		testingContext.Fatalf("Expected parseOptions to fail for min-eval-length=0.")
	}
}

// TestApplyIgnoreFilters verifies that violations matched by any of the
// ignore matchers are removed while unmatched ones survive.
func TestApplyIgnoreFilters(testingContext *testing.T) {
	input := analyzeResultFixture()
	filtered := applyIgnoreFilters(input, []string{
		"name=ctx",
		"reason=too short",
		"file=_test.go",
	})

	if len(filtered.Violations) != 1 {
		testingContext.Fatalf("Expected one remaining violation, got %d.", len(filtered.Violations))
	}

	if filtered.Violations[0].Identifier.Name != "userNum" {
		testingContext.Fatalf("Unexpected remaining violation: %q.", filtered.Violations[0].Identifier.Name)
	}
}

// analyzeResultFixture builds three violations: two that the matchers in
// TestApplyIgnoreFilters hit ("ctx" by name, "t" by file/reason), and one
// ("userNum") expected to survive filtering.
func analyzeResultFixture() analyze.Result {
	return analyze.Result{
		Violations: []analyze.Violation{
			{
				Identifier: collect.Identifier{
					Name: "ctx",
					Kind: "parameter",
					File: "sample.go",
					Context: collect.Context{
						EnclosingFunction: "Handle",
					},
				},
				Reason: "Contains abbreviation: ctx.",
			},
			{
				Identifier: collect.Identifier{
					Name: "t",
					Kind: "parameter",
					File: "main_test.go",
				},
				Reason: "Name is too short to be self-documenting.",
			},
			{
				Identifier: collect.Identifier{
					Name: "userNum",
					Kind: "parameter",
					File: "sample.go",
				},
				Reason: "Contains abbreviation: num.",
			},
		},
	}
}
+9
testdata/dictionary/words.txt
··· 1 + option 2 + has 3 + resource 4 + parsed 5 + identifier 6 + allowlist 7 + context 8 + error 9 + expression
+20
testdata/samplepkg/sample.go
// Package samplepkg is analyzer test data. Its identifiers are
// deliberately abbreviated (Cfg, ctx, idx, usr, Num, Val) so the
// analyzer's tests have violations to detect — do not "fix" these names.
package samplepkg

import "context"

// UserCfg intentionally abbreviates "Configuration" as "Cfg".
type UserCfg struct {
	userNum int // intentionally abbreviated field name
}

// Handle packs several deliberate abbreviation patterns (parameter,
// local, and range-variable names) into one function for the analyzer
// to flag.
func Handle(ctx context.Context, userNum int) int {
	resultVal := userNum + 1

	// Deliberately terse range identifiers (idx, usr).
	for idx, usr := range []string{"a", "b"} {
		_ = idx
		_ = usr
	}

	_ = ctx

	return resultVal
}