Kieran's opinionated (and probably slightly dumb) nix config

feat: update copilot models

dunkirk.sh 808e4c3a 6cb12136

verified
+136 -19
modules/home/apps/crush.nix
··· 7 7 options.atelier.apps.crush.enable = lib.mkEnableOption "Enable Crush config"; 8 8 config = lib.mkIf config.atelier.apps.crush.enable { 9 9 atelier.apps.anthropic-manager.enable = lib.mkDefault true; 10 - 10 + 11 11 programs.crush = { 12 12 enable = true; 13 13 settings = { ··· 40 40 }; 41 41 models = [ 42 42 { 43 - id = "gpt-4.1"; 44 - name = "Copilot: GPT-4.1"; 43 + id = "gpt-5.2-codex"; 44 + name = "Copilot: OpenAI GPT-5.2-Codex"; 45 45 cost_per_1m_in = 0; 46 46 cost_per_1m_out = 0; 47 47 cost_per_1m_in_cached = 0; 48 48 cost_per_1m_out_cached = 0; 49 - context_window = 128000; 50 - default_max_tokens = 16384; 49 + context_window = 400000; 50 + default_max_tokens = 128000; 51 51 can_reason = true; 52 52 has_reasoning_efforts = false; 53 53 supports_attachments = true; ··· 79 79 supports_attachments = true; 80 80 } 81 81 { 82 - id = "gpt-4o"; 83 - name = "Copilot: GPT-4o"; 82 + id = "grok-code-fast-1"; 83 + name = "Copilot: Grok Code Fast 1"; 84 84 cost_per_1m_in = 0; 85 85 cost_per_1m_out = 0; 86 86 cost_per_1m_in_cached = 0; 87 87 cost_per_1m_out_cached = 0; 88 88 context_window = 128000; 89 - default_max_tokens = 4096; 89 + default_max_tokens = 64000; 90 90 can_reason = true; 91 91 has_reasoning_efforts = false; 92 - supports_attachments = true; 92 + supports_attachments = false; 93 93 } 94 94 { 95 - id = "grok-code-fast-1"; 96 - name = "Copilot: Grok Code Fast 1"; 95 + id = "gpt-5.1"; 96 + name = "Copilot: GPT-5.1"; 97 97 cost_per_1m_in = 0; 98 98 cost_per_1m_out = 0; 99 99 cost_per_1m_in_cached = 0; 100 100 cost_per_1m_out_cached = 0; 101 - context_window = 128000; 101 + context_window = 264000; 102 102 default_max_tokens = 64000; 103 103 can_reason = true; 104 104 has_reasoning_efforts = false; 105 - supports_attachments = false; 105 + supports_attachments = true; 106 106 } 107 107 { 108 - id = "gpt-5-codex"; 109 - name = "Copilot: GPT-5-Codex (Preview)"; 108 + id = "gpt-5.1-codex"; 109 + name = "Copilot: GPT-5.1-Codex"; 110 110 cost_per_1m_in = 0; 
111 111 cost_per_1m_out = 0; 112 112 cost_per_1m_in_cached = 0; ··· 118 118 supports_attachments = true; 119 119 } 120 120 { 121 - id = "claude-3.5-sonnet"; 122 - name = "Copilot: Claude Sonnet 3.5"; 121 + id = "gpt-5.1-codex-mini"; 122 + name = "Copilot: GPT-5.1-Codex-Mini"; 123 123 cost_per_1m_in = 0; 124 124 cost_per_1m_out = 0; 125 125 cost_per_1m_in_cached = 0; 126 126 cost_per_1m_out_cached = 0; 127 - context_window = 90000; 128 - default_max_tokens = 8192; 127 + context_window = 400000; 128 + default_max_tokens = 128000; 129 + can_reason = true; 130 + has_reasoning_efforts = false; 131 + supports_attachments = true; 132 + } 133 + { 134 + id = "gpt-5.1-codex-max"; 135 + name = "Copilot: GPT-5.1-Codex-Max"; 136 + cost_per_1m_in = 0; 137 + cost_per_1m_out = 0; 138 + cost_per_1m_in_cached = 0; 139 + cost_per_1m_out_cached = 0; 140 + context_window = 400000; 141 + default_max_tokens = 128000; 142 + can_reason = true; 143 + has_reasoning_efforts = false; 144 + supports_attachments = true; 145 + } 146 + { 147 + id = "gpt-5-codex"; 148 + name = "Copilot: GPT-5-Codex (Preview)"; 149 + cost_per_1m_in = 0; 150 + cost_per_1m_out = 0; 151 + cost_per_1m_in_cached = 0; 152 + cost_per_1m_out_cached = 0; 153 + context_window = 400000; 154 + default_max_tokens = 128000; 129 155 can_reason = true; 130 156 has_reasoning_efforts = false; 131 157 supports_attachments = true; ··· 157 183 supports_attachments = true; 158 184 } 159 185 { 186 + id = "claude-opus-4.5"; 187 + name = "Copilot: Claude Opus 4.5"; 188 + cost_per_1m_in = 0; 189 + cost_per_1m_out = 0; 190 + cost_per_1m_in_cached = 0; 191 + cost_per_1m_out_cached = 0; 192 + context_window = 160000; 193 + default_max_tokens = 16000; 194 + can_reason = true; 195 + has_reasoning_efforts = false; 196 + supports_attachments = true; 197 + } 198 + { 160 199 id = "claude-haiku-4.5"; 161 200 name = "Copilot: Claude Haiku 4.5"; 162 201 cost_per_1m_in = 0; ··· 170 209 supports_attachments = true; 171 210 } 172 211 { 212 + id = 
"gemini-3-pro-preview"; 213 + name = "Copilot: Gemini 3 Pro (Preview)"; 214 + cost_per_1m_in = 0; 215 + cost_per_1m_out = 0; 216 + cost_per_1m_in_cached = 0; 217 + cost_per_1m_out_cached = 0; 218 + context_window = 128000; 219 + default_max_tokens = 64000; 220 + can_reason = true; 221 + has_reasoning_efforts = false; 222 + supports_attachments = true; 223 + } 224 + { 225 + id = "gemini-3-flash-preview"; 226 + name = "Copilot: Gemini 3 Flash (Preview)"; 227 + cost_per_1m_in = 0; 228 + cost_per_1m_out = 0; 229 + cost_per_1m_in_cached = 0; 230 + cost_per_1m_out_cached = 0; 231 + context_window = 128000; 232 + default_max_tokens = 64000; 233 + can_reason = true; 234 + has_reasoning_efforts = false; 235 + supports_attachments = true; 236 + } 237 + { 173 238 id = "gemini-2.5-pro"; 174 239 name = "Copilot: Gemini 2.5 Pro"; 175 240 cost_per_1m_in = 0; ··· 178 243 cost_per_1m_out_cached = 0; 179 244 context_window = 128000; 180 245 default_max_tokens = 64000; 246 + can_reason = true; 247 + has_reasoning_efforts = false; 248 + supports_attachments = true; 249 + } 250 + { 251 + id = "oswe-vscode-prime"; 252 + name = "Copilot: Raptor mini (Preview)"; 253 + cost_per_1m_in = 0; 254 + cost_per_1m_out = 0; 255 + cost_per_1m_in_cached = 0; 256 + cost_per_1m_out_cached = 0; 257 + context_window = 264000; 258 + default_max_tokens = 64000; 259 + can_reason = true; 260 + has_reasoning_efforts = false; 261 + supports_attachments = true; 262 + } 263 + { 264 + id = "gpt-5.2"; 265 + name = "Copilot: GPT-5.2"; 266 + cost_per_1m_in = 0; 267 + cost_per_1m_out = 0; 268 + cost_per_1m_in_cached = 0; 269 + cost_per_1m_out_cached = 0; 270 + context_window = 264000; 271 + default_max_tokens = 64000; 272 + can_reason = true; 273 + has_reasoning_efforts = false; 274 + supports_attachments = true; 275 + } 276 + { 277 + id = "gpt-4.1"; 278 + name = "Copilot: GPT-4.1"; 279 + cost_per_1m_in = 0; 280 + cost_per_1m_out = 0; 281 + cost_per_1m_in_cached = 0; 282 + cost_per_1m_out_cached = 0; 283 + 
context_window = 128000; 284 + default_max_tokens = 16384; 285 + can_reason = true; 286 + has_reasoning_efforts = false; 287 + supports_attachments = true; 288 + } 289 + { 290 + id = "gpt-4o"; 291 + name = "Copilot: GPT-4o"; 292 + cost_per_1m_in = 0; 293 + cost_per_1m_out = 0; 294 + cost_per_1m_in_cached = 0; 295 + cost_per_1m_out_cached = 0; 296 + context_window = 128000; 297 + default_max_tokens = 4096; 181 298 can_reason = true; 182 299 has_reasoning_efforts = false; 183 300 supports_attachments = true;