Monorepo management for opam overlays

monopam: Format code with dune fmt

+2961 -1993
+9 -1
bin/dune
··· 2 2 (name main) 3 3 (public_name monopam) 4 4 (package monopam) 5 - (libraries monopam requests eio_main cmdliner fmt.tty fmt.cli logs.fmt logs.cli)) 5 + (libraries 6 + monopam 7 + requests 8 + eio_main 9 + cmdliner 10 + fmt.tty 11 + fmt.cli 12 + logs.fmt 13 + logs.cli))
+273 -180
bin/main.ml
··· 12 12 13 13 let logging_term = 14 14 let verbose_http_term = 15 - Term.(const (fun ws -> ws.Requests.Cmd.value) $ Requests.Cmd.verbose_http_term "monopam") 15 + Term.( 16 + const (fun ws -> ws.Requests.Cmd.value) 17 + $ Requests.Cmd.verbose_http_term "monopam") 16 18 in 17 - Term.(const setup_logging $ Fmt_cli.style_renderer () $ Logs_cli.level () $ verbose_http_term) 19 + Term.( 20 + const setup_logging $ Fmt_cli.style_renderer () $ Logs_cli.level () 21 + $ verbose_http_term) 18 22 19 23 let package_arg = 20 24 let doc = "Package name. If not specified, operates on all packages." in ··· 31 35 let checkouts = Monopam.Verse_config.src_path verse_config in 32 36 let monorepo = Monopam.Verse_config.mono_path verse_config in 33 37 let default_branch = Monopam.Verse_config.default_branch in 34 - Ok (Monopam.Config.create ~opam_repo ~checkouts ~monorepo ~default_branch ()) 38 + Ok 39 + (Monopam.Config.create ~opam_repo ~checkouts ~monorepo ~default_branch 40 + ()) 35 41 36 42 let with_config env f = 37 43 match load_config env with ··· 57 63 `I ("remote:", "Sync between your checkout (src/) and upstream git remote"); 58 64 `S "LOCAL SYNC INDICATORS"; 59 65 `I ("local:=", "Monorepo and checkout are in sync"); 60 - `I ("local:+N", "Monorepo has N commits not yet in checkout (run $(b,monopam sync))"); 61 - `I ("local:-N", "Checkout has N commits not yet in monorepo (run $(b,monopam sync))"); 66 + `I 67 + ( "local:+N", 68 + "Monorepo has N commits not yet in checkout (run $(b,monopam sync))" 69 + ); 70 + `I 71 + ( "local:-N", 72 + "Checkout has N commits not yet in monorepo (run $(b,monopam sync))" 73 + ); 62 74 `I ("local:sync", "Trees differ, needs sync (run $(b,monopam sync))"); 63 75 `S "REMOTE SYNC INDICATORS"; 64 76 `I ("remote:=", "Checkout and upstream remote are in sync"); 65 - `I ("remote:+N", "Checkout has N commits to push (run $(b,monopam sync --remote))"); 77 + `I 78 + ( "remote:+N", 79 + "Checkout has N commits to push (run $(b,monopam sync --remote))" ); 
66 80 `I ("remote:-N", "Upstream has N commits to pull (run $(b,monopam sync))"); 67 81 `I ("remote:+N/-M", "Diverged: checkout +N ahead, upstream +M ahead"); 68 82 `S "FORK ANALYSIS"; ··· 94 108 (* Check for unregistered opam files *) 95 109 (match Monopam.discover_packages ~fs ~config () with 96 110 | Ok pkgs -> 97 - let unregistered = Monopam.find_unregistered_opam_files ~fs ~config pkgs in 111 + let unregistered = 112 + Monopam.find_unregistered_opam_files ~fs ~config pkgs 113 + in 98 114 if unregistered <> [] then begin 99 115 (* Get local handle abbreviation *) 100 - let handle_abbrev = match Monopam.Verse_config.load ~fs () with 101 - | Ok vc -> 116 + let handle_abbrev = 117 + match Monopam.Verse_config.load ~fs () with 118 + | Ok vc -> ( 102 119 let h = Monopam.Verse_config.handle vc in 103 - (match String.split_on_char '.' h with 104 - | first :: _ -> if String.length first <= 4 then first else String.sub first 0 3 105 - | [] -> h) 120 + match String.split_on_char '.' h with 121 + | first :: _ -> 122 + if String.length first <= 4 then first 123 + else String.sub first 0 3 124 + | [] -> h) 106 125 | Error _ -> "local" 107 126 in 108 127 Fmt.pr "%a %a\n" 109 - Fmt.(styled `Bold string) "Unregistered:" 110 - Fmt.(styled `Faint int) (List.length unregistered); 111 - List.iter (fun (_r, p) -> 112 - Fmt.pr " %-22s %a\n" p Fmt.(styled `Faint (fun ppf s -> pf ppf "%s~" s)) handle_abbrev) 128 + Fmt.(styled `Bold string) 129 + "Unregistered:" 130 + Fmt.(styled `Faint int) 131 + (List.length unregistered); 132 + List.iter 133 + (fun (_r, p) -> 134 + Fmt.pr " %-22s %a\n" p 135 + Fmt.(styled `Faint (fun ppf s -> pf ppf "%s~" s)) 136 + handle_abbrev) 113 137 unregistered 114 138 end 115 139 | Error _ -> ()); ··· 118 142 | Error _ -> () 119 143 | Ok verse_config -> 120 144 let forks = 121 - Monopam.Forks.compute ~proc ~fs ~verse_config ~monopam_config:config () 145 + Monopam.Forks.compute ~proc ~fs ~verse_config 146 + ~monopam_config:config () 122 147 in 123 148 if 
forks.repos <> [] then 124 149 Fmt.pr "%a" (Monopam.Forks.pp_summary' ~show_all) forks); ··· 160 185 `S "PHASES"; 161 186 `P "The sync command executes these phases in order:"; 162 187 `I ("1. Validate", "Abort if the monorepo has uncommitted changes"); 163 - `I ("2. Push", "Export monorepo changes to checkouts (parallel) [--skip-push skips]"); 188 + `I 189 + ( "2. Push", 190 + "Export monorepo changes to checkouts (parallel) [--skip-push skips]" 191 + ); 164 192 `I ("3. Fetch", "Clone/fetch from remotes (parallel) [--skip-pull skips]"); 165 193 `I ("4. Merge", "Fast-forward merge checkouts [--skip-pull skips]"); 166 194 `I ("5. Subtree", "Pull subtrees into monorepo [--skip-pull skips]"); 167 195 `I ("6. Finalize", "Update README.md, CLAUDE.md, and dune-project"); 168 196 `I ("7. Remote", "Push to upstream remotes if --remote (parallel)"); 169 197 `S "SKIP OPTIONS"; 170 - `I ("--skip-push", "Skip exporting monorepo changes to checkouts. Use when \ 171 - you know you have no local changes to export."); 172 - `I ("--skip-pull", "Skip fetching and pulling from remotes. Use when you \ 173 - only want to export local changes without pulling remote updates."); 198 + `I 199 + ( "--skip-push", 200 + "Skip exporting monorepo changes to checkouts. Use when you know you \ 201 + have no local changes to export." ); 202 + `I 203 + ( "--skip-pull", 204 + "Skip fetching and pulling from remotes. Use when you only want to \ 205 + export local changes without pulling remote updates." 
); 174 206 `S "PREREQUISITES"; 175 207 `P "Before running sync:"; 176 - `I ("-", "Commit all changes in the monorepo: $(b,git add -A && git commit)"); 208 + `I 209 + ( "-", 210 + "Commit all changes in the monorepo: $(b,git add -A && git commit)" ); 177 211 `I ("-", "For --remote: ensure git credentials/SSH keys are configured"); 178 212 ] 179 213 in ··· 197 231 with_config env @@ fun config -> 198 232 let fs = Eio.Stdenv.fs env in 199 233 let proc = Eio.Stdenv.process_mgr env in 200 - match Monopam.sync ~proc ~fs ~config ?package ~remote ~skip_push ~skip_pull () with 234 + match 235 + Monopam.sync ~proc ~fs ~config ?package ~remote ~skip_push ~skip_pull () 236 + with 201 237 | Ok summary -> 202 - if summary.errors = [] then 203 - `Ok () 238 + if summary.errors = [] then `Ok () 204 239 else begin 205 - Fmt.epr "Sync completed with %d errors.@." (List.length summary.errors); 240 + Fmt.epr "Sync completed with %d errors.@." 241 + (List.length summary.errors); 206 242 `Ok () 207 243 end 208 244 | Error e -> ··· 210 246 `Error (false, "sync failed") 211 247 in 212 248 Cmd.v info 213 - Term.(ret (const run $ package_arg $ remote_arg $ skip_push_arg $ skip_pull_arg $ logging_term)) 249 + Term.( 250 + ret 251 + (const run $ package_arg $ remote_arg $ skip_push_arg $ skip_pull_arg 252 + $ logging_term)) 214 253 215 254 (* Changes command *) 216 255 ··· 223 262 `P 224 263 "By default, generates weekly entries. 
Use --daily to generate daily \ 225 264 entries instead."; 226 - `P 227 - "Changes are stored in the .changes directory at the monorepo root:"; 265 + `P "Changes are stored in the .changes directory at the monorepo root:"; 228 266 `I (".changes/<repo>.json", "Weekly changelog entries"); 229 267 `I (".changes/<repo>-daily.json", "Daily changelog entries"); 230 - `I (".changes/YYYYMMDD.json", "Aggregated daily entries (default with --daily)"); 231 - `P 232 - "Also generates aggregated markdown files at the monorepo root:"; 268 + `I 269 + ( ".changes/YYYYMMDD.json", 270 + "Aggregated daily entries (default with --daily)" ); 271 + `P "Also generates aggregated markdown files at the monorepo root:"; 233 272 `I ("CHANGES.md", "Aggregated weekly changelog"); 234 273 `I ("DAILY-CHANGES.md", "Aggregated daily changelog"); 235 274 `P "Each entry includes:"; ··· 245 284 (empty summary and changes) rather than 'no changes' text."; 246 285 `P 247 286 "When using --daily, an aggregated JSON file is generated by default \ 248 - for the poe Zulip bot broadcasting system. Use --no-aggregate to skip."; 287 + for the poe Zulip bot broadcasting system. Use --no-aggregate to \ 288 + skip."; 249 289 `P 250 290 "If a per-repo-per-day JSON file already exists for a past day, that \ 251 291 repo is skipped for that day to avoid redundant Claude API calls."; ··· 257 297 Arg.(value & flag & info [ "daily"; "d" ] ~doc) 258 298 in 259 299 let weeks = 260 - let doc = "Number of past weeks to analyze (default: 1, current week only). Ignored if --daily is set." in 300 + let doc = 301 + "Number of past weeks to analyze (default: 1, current week only). \ 302 + Ignored if --daily is set." 
303 + in 261 304 Arg.(value & opt int 1 & info [ "w"; "weeks" ] ~doc) 262 305 in 263 306 let days = 264 - let doc = "Number of past days to analyze when using --daily (default: 1, today only)" in 307 + let doc = 308 + "Number of past days to analyze when using --daily (default: 1, today \ 309 + only)" 310 + in 265 311 Arg.(value & opt int 1 & info [ "days" ] ~doc) 266 312 in 267 313 let history = 268 - let doc = "Number of recent entries to include in aggregated markdown (default: 12 for weekly, 30 for daily)" in 314 + let doc = 315 + "Number of recent entries to include in aggregated markdown (default: 12 \ 316 + for weekly, 30 for daily)" 317 + in 269 318 Arg.(value & opt int 12 & info [ "history" ] ~doc) 270 319 in 271 320 let dry_run = ··· 273 322 Arg.(value & flag & info [ "dry-run"; "n" ] ~doc) 274 323 in 275 324 let no_aggregate = 276 - let doc = "Skip generating .changes/YYYYMMDD.json aggregated file (--daily generates it by default)" in 325 + let doc = 326 + "Skip generating .changes/YYYYMMDD.json aggregated file (--daily \ 327 + generates it by default)" 328 + in 277 329 Arg.(value & flag & info [ "no-aggregate" ] ~doc) 278 330 in 279 331 let run package daily weeks days history dry_run no_aggregate () = ··· 288 340 let history = if history = 12 then 30 else history in 289 341 (* Aggregate by default for daily, unless --no-aggregate is passed *) 290 342 let aggregate = not no_aggregate in 291 - Monopam.changes_daily ~proc ~fs ~config ~clock ?package ~days ~history ~dry_run ~aggregate () 343 + Monopam.changes_daily ~proc ~fs ~config ~clock ?package ~days ~history 344 + ~dry_run ~aggregate () 292 345 end 293 346 else 294 - Monopam.changes ~proc ~fs ~config ~clock ?package ~weeks ~history ~dry_run () 347 + Monopam.changes ~proc ~fs ~config ~clock ?package ~weeks ~history 348 + ~dry_run () 295 349 in 296 350 match result with 297 351 | Ok () -> ··· 318 372 `S Manpage.s_description; 319 373 `P 320 374 "Copies .opam files from monorepo subtrees to the opam-repo 
overlay. \ 321 - This ensures your opam overlay reflects any changes you made to \ 322 - .opam files in the monorepo."; 375 + This ensures your opam overlay reflects any changes you made to .opam \ 376 + files in the monorepo."; 323 377 `S "HOW IT WORKS"; 324 378 `P "For each package in your opam overlay:"; 325 - `I ("1.", "Reads the .opam file from the monorepo subtree (e.g., mono/eio/eio.opam)"); 326 - `I ("2.", "Compares with the opam-repo version (e.g., opam-repo/packages/eio/eio.dev/opam)"); 379 + `I 380 + ( "1.", 381 + "Reads the .opam file from the monorepo subtree (e.g., \ 382 + mono/eio/eio.opam)" ); 383 + `I 384 + ( "2.", 385 + "Compares with the opam-repo version (e.g., \ 386 + opam-repo/packages/eio/eio.dev/opam)" ); 327 387 `I ("3.", "If different, copies monorepo → opam-repo"); 328 388 `I ("4.", "Stages and commits changes in opam-repo"); 329 389 `S "PRECEDENCE"; ··· 343 403 let proc = Eio.Stdenv.process_mgr env in 344 404 match Monopam.sync_opam_files ~proc ~fs ~config ?package () with 345 405 | Ok result -> 346 - if result.synced = [] then 347 - Fmt.pr "All opam files already in sync.@." 348 - else 349 - Fmt.pr "Synced %d opam files.@." (List.length result.synced); 406 + if result.synced = [] then Fmt.pr "All opam files already in sync.@." 407 + else Fmt.pr "Synced %d opam files.@." (List.length result.synced); 350 408 `Ok () 351 409 | Error e -> 352 410 Fmt.epr "Error: %a@." Monopam.pp_error_with_hint e; ··· 382 440 `Error (false, "configuration error") 383 441 384 442 let verse_root_arg = 385 - let doc = "Path to workspace root directory. Defaults to current directory." in 443 + let doc = 444 + "Path to workspace root directory. Defaults to current directory." 
445 + in 386 446 Arg.( 387 447 value 388 448 & opt (some (conv (Fpath.of_string, Fpath.pp))) None ··· 390 450 391 451 let verse_handle_arg = 392 452 let doc = "Tangled handle (e.g., alice.bsky.social)" in 393 - Arg.(required & opt (some string) None & info [ "handle" ] ~docv:"HANDLE" ~doc) 453 + Arg.( 454 + required & opt (some string) None & info [ "handle" ] ~docv:"HANDLE" ~doc) 394 455 395 456 let verse_handle_opt_pos_arg = 396 - let doc = "Tangled handle. If not specified, operates on all tracked members." in 457 + let doc = 458 + "Tangled handle. If not specified, operates on all tracked members." 459 + in 397 460 Arg.(value & pos 0 (some string) None & info [] ~docv:"HANDLE" ~doc) 398 461 399 462 let verse_init_cmd = ··· 402 465 [ 403 466 `S Manpage.s_description; 404 467 `P 405 - "Creates a new opamverse workspace for federated monorepo collaboration. \ 406 - An opamverse workspace lets you browse and track other developers' \ 407 - monorepos alongside your own."; 468 + "Creates a new opamverse workspace for federated monorepo \ 469 + collaboration. 
An opamverse workspace lets you browse and track other \ 470 + developers' monorepos alongside your own."; 408 471 `S "WORKSPACE STRUCTURE"; 409 - `P "The init command creates the following directory structure at the workspace root:"; 472 + `P 473 + "The init command creates the following directory structure at the \ 474 + workspace root:"; 410 475 `I ("mono/", "Your monorepo - use with standard monopam commands"); 411 476 `I ("src/", "Your source checkouts - individual git repos"); 412 477 `I ("verse/", "Other users' monorepos, organized by handle"); 413 478 `P "Configuration and data are stored in XDG directories:"; 414 479 `I ("~/.config/monopam/opamverse.toml", "Workspace configuration"); 415 - `I ("~/.local/share/monopam/opamverse-registry/", "Git clone of the community registry"); 480 + `I 481 + ( "~/.local/share/monopam/opamverse-registry/", 482 + "Git clone of the community registry" ); 416 483 `S "CONFIGURATION FILE"; 417 484 `P "The opamverse.toml file has the following structure:"; 418 - `Pre "[workspace]\n\ 419 - root = \"/path/to/workspace\"\n\ 420 - default_branch = \"main\"\n\n\ 421 - [paths]\n\ 422 - mono = \"mono\"\n\ 423 - src = \"src\"\n\ 424 - verse = \"verse\"\n\n\ 425 - [identity]\n\ 426 - handle = \"yourname.bsky.social\""; 485 + `Pre 486 + "[workspace]\n\ 487 + root = \"/path/to/workspace\"\n\ 488 + default_branch = \"main\"\n\n\ 489 + [paths]\n\ 490 + mono = \"mono\"\n\ 491 + src = \"src\"\n\ 492 + verse = \"verse\"\n\n\ 493 + [identity]\n\ 494 + handle = \"yourname.bsky.social\""; 427 495 `S "AUTHENTICATION"; 428 - `P 429 - "Before running init, you must authenticate with the tangled network:"; 496 + `P "Before running init, you must authenticate with the tangled network:"; 430 497 `Pre "tangled auth login"; 431 498 `P 432 499 "The handle you provide is validated against the AT Protocol identity \ 433 500 system to ensure it exists and you are authenticated."; 434 501 `S "REGISTRY"; 435 502 `P 436 - "The opamverse registry is a git 
repository containing an opamverse.toml \ 437 - file that lists community members and their monorepo URLs. The default \ 438 - registry is at: https://tangled.org/eeg.cl.cam.ac.uk/opamverse"; 503 + "The opamverse registry is a git repository containing an \ 504 + opamverse.toml file that lists community members and their monorepo \ 505 + URLs. The default registry is at: \ 506 + https://tangled.org/eeg.cl.cam.ac.uk/opamverse"; 439 507 `S Manpage.s_examples; 440 508 `P "Initialize a workspace in ~/tangled:"; 441 - `Pre "cd ~/tangled\n\ 442 - monopam verse init --handle alice.bsky.social"; 509 + `Pre "cd ~/tangled\nmonopam verse init --handle alice.bsky.social"; 443 510 `P "Initialize with explicit root path:"; 444 511 `Pre "monopam verse init --root ~/my-workspace --handle alice.bsky.social"; 445 512 ] ··· 452 519 let root = 453 520 match root with 454 521 | Some r -> r 455 - | None -> 522 + | None -> ( 456 523 let cwd_path = Eio.Stdenv.cwd env in 457 524 let _, cwd_str = (cwd_path :> _ Eio.Path.t) in 458 525 match Fpath.of_string cwd_str with 459 526 | Ok p -> p 460 - | Error (`Msg _) -> Fpath.v "." 527 + | Error (`Msg _) -> Fpath.v ".") 461 528 in 462 529 match Monopam.Verse.init ~proc ~fs ~root ~handle () with 463 530 | Ok () -> ··· 467 534 Fmt.epr "Error: %a@." Monopam.Verse.pp_error_with_hint e; 468 535 `Error (false, "init failed") 469 536 in 470 - Cmd.v info Term.(ret (const run $ verse_root_arg $ verse_handle_arg $ logging_term)) 537 + Cmd.v info 538 + Term.(ret (const run $ verse_root_arg $ verse_handle_arg $ logging_term)) 471 539 472 540 let verse_members_cmd = 473 541 let doc = "List registry members" in ··· 476 544 `S Manpage.s_description; 477 545 `P 478 546 "Lists all members registered in the opamverse community registry. 
\ 479 - This shows everyone who has published their monorepo for collaboration."; 547 + This shows everyone who has published their monorepo for \ 548 + collaboration."; 480 549 `P 481 550 "The registry is automatically pulled (git pull) when running this \ 482 551 command to ensure you see the latest members."; ··· 484 553 `P 485 554 "The registry is a git repository containing an opamverse.toml file \ 486 555 with the following structure:"; 487 - `Pre "[registry]\n\ 488 - name = \"tangled-community\"\n\n\ 489 - [[members]]\n\ 490 - handle = \"alice.bsky.social\"\n\ 491 - monorepo = \"https://github.com/alice/mono\"\n\n\ 492 - [[members]]\n\ 493 - handle = \"bob.example.com\"\n\ 494 - monorepo = \"https://github.com/bob/mono\""; 556 + `Pre 557 + "[registry]\n\ 558 + name = \"tangled-community\"\n\n\ 559 + [[members]]\n\ 560 + handle = \"alice.bsky.social\"\n\ 561 + monorepo = \"https://github.com/alice/mono\"\n\n\ 562 + [[members]]\n\ 563 + handle = \"bob.example.com\"\n\ 564 + monorepo = \"https://github.com/bob/mono\""; 495 565 `S "OUTPUT"; 496 566 `P "Each line shows a member's handle and their monorepo git URL:"; 497 - `Pre "alice.bsky.social -> https://github.com/alice/mono\n\ 498 - bob.example.com -> https://github.com/bob/mono"; 567 + `Pre 568 + "alice.bsky.social -> https://github.com/alice/mono\n\ 569 + bob.example.com -> https://github.com/bob/mono"; 499 570 `S "ADDING YOURSELF"; 500 571 `P 501 572 "To add yourself to the registry, submit a pull request to the \ ··· 537 608 `P "Without arguments: syncs all members in the registry."; 538 609 `S "ERROR HANDLING"; 539 610 `P 540 - "If a sync fails for one member (e.g., network error), the error \ 541 - is reported but other members are still synced."; 611 + "If a sync fails for one member (e.g., network error), the error is \ 612 + reported but other members are still synced."; 542 613 `S Manpage.s_examples; 543 - `Pre "# Sync all registry members\n\ 544 - monopam verse pull\n\n\ 545 - # Sync a specific 
member\n\ 546 - monopam verse pull alice.bsky.social\n\n\ 547 - # Browse their code\n\ 548 - ls verse/alice.bsky.social/"; 614 + `Pre 615 + "# Sync all registry members\n\ 616 + monopam verse pull\n\n\ 617 + # Sync a specific member\n\ 618 + monopam verse pull alice.bsky.social\n\n\ 619 + # Browse their code\n\ 620 + ls verse/alice.bsky.social/"; 549 621 ] 550 622 in 551 623 let info = Cmd.info "pull" ~doc ~man in ··· 574 646 changes. This is the command to run regularly to stay up to date."; 575 647 `S "WHAT IT DOES"; 576 648 `P "The sync command performs two operations:"; 577 - `I ("1.", "Updates the registry: git pull in ~/.local/share/monopam/opamverse-registry/"); 649 + `I 650 + ( "1.", 651 + "Updates the registry: git pull in \ 652 + ~/.local/share/monopam/opamverse-registry/" ); 578 653 `I ("2.", "Pulls all tracked members: git pull in each verse/<handle>/"); 579 654 `S "USE CASES"; 580 655 `P "Run sync when you want to:"; ··· 583 658 `I ("-", "Catch up after being away for a while"); 584 659 `S "COMPARISON WITH PULL"; 585 660 `P 586 - "'verse sync' updates the registry AND pulls members. \ 587 - 'verse pull' only pulls members (skips registry update)."; 661 + "'verse sync' updates the registry AND pulls members. 'verse pull' \ 662 + only pulls members (skips registry update)."; 588 663 `S Manpage.s_examples; 589 - `Pre "# Daily sync routine\n\ 590 - cd ~/tangled\n\ 591 - monopam verse sync\n\ 592 - monopam verse status"; 664 + `Pre 665 + "# Daily sync routine\n\ 666 + cd ~/tangled\n\ 667 + monopam verse sync\n\ 668 + monopam verse status"; 593 669 ] 594 670 in 595 671 let info = Cmd.info "sync" ~doc ~man in ··· 616 692 `P 617 693 "The opamverse system enables federated collaboration across multiple \ 618 694 developers' monorepos. 
Each developer maintains their own monorepo \ 619 - (managed by standard monopam commands), and can track other developers' \ 620 - monorepos for code browsing, learning, and collaboration."; 695 + (managed by standard monopam commands), and can track other \ 696 + developers' monorepos for code browsing, learning, and collaboration."; 621 697 `P 622 698 "Members are identified by tangled handles - decentralized identities \ 623 699 from the AT Protocol network (the same system used by Bluesky)."; 624 700 `S "QUICK START FOR NEW USERS"; 625 701 `P "Run these commands in order to get started:"; 626 - `Pre "# Step 1: Authenticate with tangled (one-time setup)\n\ 627 - tangled auth login\n\n\ 628 - # Step 2: Create and initialize your workspace\n\ 629 - mkdir ~/tangled && cd ~/tangled\n\ 630 - monopam verse init --handle yourname.bsky.social\n\n\ 631 - # Step 3: Sync all community members\n\ 632 - monopam verse pull\n\n\ 633 - # Step 4: Browse their code\n\ 634 - ls verse/\n\ 635 - cd verse/alice.bsky.social && dune build\n\n\ 636 - # Step 5: Keep everything updated (run daily/weekly)\n\ 637 - monopam verse sync"; 702 + `Pre 703 + "# Step 1: Authenticate with tangled (one-time setup)\n\ 704 + tangled auth login\n\n\ 705 + # Step 2: Create and initialize your workspace\n\ 706 + mkdir ~/tangled && cd ~/tangled\n\ 707 + monopam verse init --handle yourname.bsky.social\n\n\ 708 + # Step 3: Sync all community members\n\ 709 + monopam verse pull\n\n\ 710 + # Step 4: Browse their code\n\ 711 + ls verse/\n\ 712 + cd verse/alice.bsky.social && dune build\n\n\ 713 + # Step 5: Keep everything updated (run daily/weekly)\n\ 714 + monopam verse sync"; 638 715 `S "KEY CONCEPTS"; 639 - `I ("Workspace", "A directory containing your monorepo plus all registry members' repos"); 640 - `I ("Registry", "A git repository listing community members and their repo URLs"); 641 - `I ("Handle", "A tangled identity like 'alice.bsky.social' validated via AT Protocol"); 716 + `I 717 + ( "Workspace", 
718 + "A directory containing your monorepo plus all registry members' \ 719 + repos" ); 720 + `I 721 + ( "Registry", 722 + "A git repository listing community members and their repo URLs" ); 723 + `I 724 + ( "Handle", 725 + "A tangled identity like 'alice.bsky.social' validated via AT \ 726 + Protocol" ); 642 727 `S "WORKSPACE STRUCTURE"; 643 728 `P "An opamverse workspace has this layout:"; 644 - `Pre "~/tangled/ # workspace root\n\ 645 - ├── mono/ # YOUR monorepo\n\ 646 - ├── src/ # YOUR fork checkouts\n\ 647 - ├── opam-repo/ # YOUR opam overlay\n\ 648 - └── verse/\n\ 649 - \ ├── alice.bsky.social/ # Alice's monorepo\n\ 650 - \ ├── alice.bsky.social-opam/ # Alice's opam overlay\n\ 651 - \ ├── bob.example.com/ # Bob's monorepo\n\ 652 - \ └── bob.example.com-opam/ # Bob's opam overlay"; 729 + `Pre 730 + "~/tangled/ # workspace root\n\ 731 + ├── mono/ # YOUR monorepo\n\ 732 + ├── src/ # YOUR fork checkouts\n\ 733 + ├── opam-repo/ # YOUR opam overlay\n\ 734 + └── verse/\n\ 735 + \ ├── alice.bsky.social/ # Alice's monorepo\n\ 736 + \ ├── alice.bsky.social-opam/ # Alice's opam overlay\n\ 737 + \ ├── bob.example.com/ # Bob's monorepo\n\ 738 + \ └── bob.example.com-opam/ # Bob's opam overlay"; 653 739 `P "Configuration and data are stored in XDG directories:"; 654 - `Pre "~/.config/monopam/\n\ 655 - └── opamverse.toml # workspace configuration\n\n\ 656 - ~/.local/share/monopam/\n\ 657 - └── opamverse-registry/ # cloned registry git repo"; 740 + `Pre 741 + "~/.config/monopam/\n\ 742 + └── opamverse.toml # workspace configuration\n\n\ 743 + ~/.local/share/monopam/\n\ 744 + └── opamverse-registry/ # cloned registry git repo"; 658 745 `S "COMMAND FLOW"; 659 746 `P "The expected sequence of commands for typical workflows:"; 660 747 `P "$(b,First-time setup) (once per machine):"; 661 - `Pre "tangled auth login # authenticate\n\ 662 - monopam verse init --handle you.bsky.social # create workspace"; 748 + `Pre 749 + "tangled auth login # authenticate\n\ 750 + monopam verse init 
--handle you.bsky.social # create workspace"; 663 751 `P "$(b,Syncing all members):"; 664 - `Pre "monopam verse pull # clone/pull all members\n\ 665 - monopam verse status # check status"; 752 + `Pre 753 + "monopam verse pull # clone/pull all \ 754 + members\n\ 755 + monopam verse status # check status"; 666 756 `P "$(b,Daily maintenance):"; 667 - `Pre "monopam verse sync # update everything\n\ 668 - monopam verse status # check for changes"; 757 + `Pre 758 + "monopam verse sync # update everything\n\ 759 + monopam verse status # check for changes"; 669 760 `P "$(b,Working in your own monorepo):"; 670 - `Pre "cd ~/tangled/mono\n\ 671 - monopam pull # fetch upstream changes\n\ 672 - # ... make edits ...\n\ 673 - monopam push # export to checkouts"; 761 + `Pre 762 + "cd ~/tangled/mono\n\ 763 + monopam pull # fetch upstream \ 764 + changes\n\ 765 + # ... make edits ...\n\ 766 + monopam push # export to checkouts"; 674 767 `S "INTEGRATION WITH MONOPAM"; 675 768 `P 676 769 "The verse system complements standard monopam commands. Your mono/ \ 677 770 directory works exactly like a normal monopam-managed monorepo:"; 678 - `Pre "# Work in your monorepo\n\ 679 - cd ~/tangled/mono\n\ 680 - monopam status\n\ 681 - monopam pull\n\ 682 - # ... make changes ...\n\ 683 - monopam push"; 771 + `Pre 772 + "# Work in your monorepo\n\ 773 + cd ~/tangled/mono\n\ 774 + monopam status\n\ 775 + monopam pull\n\ 776 + # ... make changes ...\n\ 777 + monopam push"; 684 778 `P 685 - "The verse/ directories are for reading and learning from others' code. \ 686 - You generally don't push to them (unless you're a collaborator)."; 779 + "The verse/ directories are for reading and learning from others' \ 780 + code. 
You generally don't push to them (unless you're a \ 781 + collaborator)."; 687 782 `S "REGISTRY FORMAT"; 688 - `P 689 - "The registry is a git repository containing opamverse.toml:"; 690 - `Pre "[registry]\n\ 691 - name = \"tangled-community\"\n\n\ 692 - [[members]]\n\ 693 - handle = \"alice.bsky.social\"\n\ 694 - monorepo = \"https://github.com/alice/mono\""; 695 - `P 696 - "Default registry: https://tangled.org/eeg.cl.cam.ac.uk/opamverse"; 783 + `P "The registry is a git repository containing opamverse.toml:"; 784 + `Pre 785 + "[registry]\n\ 786 + name = \"tangled-community\"\n\n\ 787 + [[members]]\n\ 788 + handle = \"alice.bsky.social\"\n\ 789 + monorepo = \"https://github.com/alice/mono\""; 790 + `P "Default registry: https://tangled.org/eeg.cl.cam.ac.uk/opamverse"; 697 791 `S "COMMANDS REFERENCE"; 698 792 `I ("init", "Create a new workspace with config and directories"); 699 793 `I ("status", "Show members and their git status"); ··· 710 804 in 711 805 let info = Cmd.info "verse" ~doc ~man in 712 806 Cmd.group info 713 - [ 714 - verse_init_cmd; 715 - verse_members_cmd; 716 - verse_pull_cmd; 717 - verse_sync_cmd; 718 - ] 807 + [ verse_init_cmd; verse_members_cmd; verse_pull_cmd; verse_sync_cmd ] 719 808 720 809 (* Doctor command *) 721 810 ··· 725 814 [ 726 815 `S Manpage.s_description; 727 816 `P 728 - "Analyzes your workspace health and provides actionable recommendations. \ 729 - Uses Claude AI to analyze commits from verse collaborators, categorizing \ 730 - them by type, priority, and risk level."; 817 + "Analyzes your workspace health and provides actionable \ 818 + recommendations. 
Uses Claude AI to analyze commits from verse \ 819 + collaborators, categorizing them by type, priority, and risk level."; 731 820 `S "WHAT IT DOES"; 732 821 `P "The doctor command:"; 733 822 `I ("1.", "Syncs the workspace (unless $(b,--no-sync) is specified)"); ··· 736 825 `I ("4.", "Analyzes fork relationships with verse members"); 737 826 `I ("5.", "Uses Claude to categorize and prioritize their commits"); 738 827 `I ("6.", "Generates actionable recommendations"); 739 - `P "The status output from $(b,monopam status) is automatically included \ 740 - in the prompt sent to Claude, so Claude doesn't need to run it separately."; 828 + `P 829 + "The status output from $(b,monopam status) is automatically included \ 830 + in the prompt sent to Claude, so Claude doesn't need to run it \ 831 + separately."; 741 832 `S "OUTPUT FORMATS"; 742 833 `P "By default, outputs human-readable text with colors."; 743 834 `P "Use $(b,--json) for JSON output suitable for tooling."; ··· 777 868 Fmt.pr "Warning: sync failed: %a@." Monopam.pp_error_with_hint e; 778 869 Fmt.pr "Continuing with analysis...@." 779 870 end; 780 - let report = Monopam.Doctor.analyze ~proc ~fs ~config ~verse_config ~clock ?package ~no_sync () in 781 - if json then 782 - print_endline (Monopam.Doctor.to_json report) 783 - else 784 - Fmt.pr "%a@." Monopam.Doctor.pp_report report; 871 + let report = 872 + Monopam.Doctor.analyze ~proc ~fs ~config ~verse_config ~clock ?package 873 + ~no_sync () 874 + in 875 + if json then print_endline (Monopam.Doctor.to_json report) 876 + else Fmt.pr "%a@." 
Monopam.Doctor.pp_report report; 785 877 `Ok () 786 878 in 787 - Cmd.v info Term.(ret (const run $ package_arg $ json_arg $ no_sync_arg $ logging_term)) 879 + Cmd.v info 880 + Term.(ret (const run $ package_arg $ json_arg $ no_sync_arg $ logging_term)) 788 881 789 882 (* Main command group *) 790 883 ··· 813 906 `P "Monopam manages three directory trees:"; 814 907 `I 815 908 ( "mono/", 816 - "The monorepo combining all packages as git subtrees. This is where you \ 817 - make changes." ); 909 + "The monorepo combining all packages as git subtrees. This is where \ 910 + you make changes." ); 818 911 `I 819 912 ( "src/", 820 913 "Individual git checkouts of each unique repository. Used for review \ ··· 833 926 `I 834 927 ( "4. monopam sync --remote", 835 928 "Sync again, including pushing to upstream git remotes" ); 836 - `P 837 - "For finer control, use $(b,push) and $(b,pull) separately:"; 929 + `P "For finer control, use $(b,push) and $(b,pull) separately:"; 838 930 `I 839 931 ( "monopam push", 840 932 "Export monorepo changes to checkouts (for manual review/push)" ); 841 933 `I 842 934 ( "monopam pull", 843 - "Pull remote changes into monorepo (when you know there are no local changes)" ); 935 + "Pull remote changes into monorepo (when you know there are no local \ 936 + changes)" ); 844 937 `S "CHECKING STATUS"; 845 938 `P "Run $(b,monopam status) to see the state of all repositories:"; 846 939 `I ("local:+N", "Your monorepo is N commits ahead of the checkout");
+475 -353
lib/changes.ml
··· 1 1 (** Changelog generation for monopam. 2 2 3 - This module handles generating weekly and daily changelog entries using Claude AI 4 - to analyze git commit history and produce user-facing change summaries. 3 + This module handles generating weekly and daily changelog entries using 4 + Claude AI to analyze git commit history and produce user-facing change 5 + summaries. 5 6 6 7 Changes are stored in a .changes directory at the monorepo root: 7 8 - .changes/<repo_name>.json - weekly changelog entries 8 - - .changes/<repo_name>-<YYYY-MM-DD>.json - daily changelog entries (one file per day per repo) 9 + - .changes/<repo_name>-<YYYY-MM-DD>.json - daily changelog entries (one file 10 + per day per repo) 9 11 - .changes/YYYYMMDD.json - aggregated daily changes for broadcasting 10 12 11 13 {1 Submodules} 12 14 13 15 - {!Aggregated} - Types and I/O for aggregated daily changes (YYYYMMDD.json) 14 - - {!Daily} - Types and I/O for per-day-per-repo changes (repo-YYYY-MM-DD.json) 16 + - {!Daily} - Types and I/O for per-day-per-repo changes 17 + (repo-YYYY-MM-DD.json) 15 18 - {!Query} - High-level query interface for changes *) 16 19 20 + module Aggregated = Changes_aggregated 17 21 (** Re-export submodules for querying changes *) 18 - module Aggregated = Changes_aggregated 22 + 19 23 module Daily = Changes_daily 20 24 module Query = Changes_query 21 25 22 - type commit_range = { 23 - from_hash : string; 24 - to_hash : string; 25 - count : int; 26 - } 26 + type commit_range = { from_hash : string; to_hash : string; count : int } 27 27 28 28 type weekly_entry = { 29 - week_start : string; (* ISO date YYYY-MM-DD, Monday *) 30 - week_end : string; (* ISO date YYYY-MM-DD, Sunday *) 31 - summary : string; (* One-line summary *) 29 + week_start : string; (* ISO date YYYY-MM-DD, Monday *) 30 + week_end : string; (* ISO date YYYY-MM-DD, Sunday *) 31 + summary : string; (* One-line summary *) 32 32 changes : string list; (* Bullet points *) 33 33 commit_range : commit_range; 34 34 } 
35 35 36 36 type daily_entry = { 37 - date : string; (* ISO date YYYY-MM-DD *) 38 - hour : int; (* Hour of day 0-23 *) 39 - timestamp : Ptime.t; (* RFC3339 timestamp for precise ordering *) 40 - summary : string; (* One-line summary *) 37 + date : string; (* ISO date YYYY-MM-DD *) 38 + hour : int; (* Hour of day 0-23 *) 39 + timestamp : Ptime.t; (* RFC3339 timestamp for precise ordering *) 40 + summary : string; (* One-line summary *) 41 41 changes : string list; (* Bullet points *) 42 42 commit_range : commit_range; 43 43 contributors : string list; (* List of contributors for this entry *) 44 44 repo_url : string option; (* Upstream repository URL *) 45 45 } 46 46 47 - type changes_file = { 48 - repository : string; 49 - entries : weekly_entry list; 50 - } 51 - 52 - type daily_changes_file = { 53 - repository : string; 54 - entries : daily_entry list; 55 - } 47 + type changes_file = { repository : string; entries : weekly_entry list } 48 + type daily_changes_file = { repository : string; entries : daily_entry list } 56 49 57 50 (** Mode for changelog generation *) 58 51 type mode = Weekly | Daily ··· 72 65 { week_start; week_end; summary; changes; commit_range } 73 66 in 74 67 Jsont.Object.map ~kind:"weekly_entry" make 75 - |> Jsont.Object.mem "week_start" Jsont.string ~enc:(fun (e : weekly_entry) -> e.week_start) 76 - |> Jsont.Object.mem "week_end" Jsont.string ~enc:(fun (e : weekly_entry) -> e.week_end) 77 - |> Jsont.Object.mem "summary" Jsont.string ~enc:(fun (e : weekly_entry) -> e.summary) 78 - |> Jsont.Object.mem "changes" (Jsont.list Jsont.string) ~enc:(fun (e : weekly_entry) -> e.changes) 79 - |> Jsont.Object.mem "commit_range" commit_range_jsont ~enc:(fun (e : weekly_entry) -> e.commit_range) 68 + |> Jsont.Object.mem "week_start" Jsont.string ~enc:(fun (e : weekly_entry) -> 69 + e.week_start) 70 + |> Jsont.Object.mem "week_end" Jsont.string ~enc:(fun (e : weekly_entry) -> 71 + e.week_end) 72 + |> Jsont.Object.mem "summary" Jsont.string ~enc:(fun (e : 
weekly_entry) -> 73 + e.summary) 74 + |> Jsont.Object.mem "changes" (Jsont.list Jsont.string) 75 + ~enc:(fun (e : weekly_entry) -> e.changes) 76 + |> Jsont.Object.mem "commit_range" commit_range_jsont 77 + ~enc:(fun (e : weekly_entry) -> e.commit_range) 80 78 |> Jsont.Object.finish 81 79 82 80 let changes_file_jsont : changes_file Jsont.t = 83 81 let make repository entries : changes_file = { repository; entries } in 84 82 Jsont.Object.map ~kind:"changes_file" make 85 - |> Jsont.Object.mem "repository" Jsont.string ~enc:(fun (f : changes_file) -> f.repository) 86 - |> Jsont.Object.mem "entries" (Jsont.list weekly_entry_jsont) ~enc:(fun (f : changes_file) -> f.entries) 83 + |> Jsont.Object.mem "repository" Jsont.string ~enc:(fun (f : changes_file) -> 84 + f.repository) 85 + |> Jsont.Object.mem "entries" (Jsont.list weekly_entry_jsont) 86 + ~enc:(fun (f : changes_file) -> f.entries) 87 87 |> Jsont.Object.finish 88 88 89 89 let ptime_jsont = 90 - let enc t = 91 - Ptime.to_rfc3339 t ~tz_offset_s:0 92 - in 90 + let enc t = Ptime.to_rfc3339 t ~tz_offset_s:0 in 93 91 let dec s = 94 92 match Ptime.of_rfc3339 s with 95 93 | Ok (t, _, _) -> t ··· 98 96 Jsont.map ~dec ~enc Jsont.string 99 97 100 98 let daily_entry_jsont : daily_entry Jsont.t = 101 - let make date hour timestamp summary changes commit_range contributors repo_url : daily_entry = 102 - { date; hour; timestamp; summary; changes; commit_range; contributors; repo_url } 99 + let make date hour timestamp summary changes commit_range contributors 100 + repo_url : daily_entry = 101 + { 102 + date; 103 + hour; 104 + timestamp; 105 + summary; 106 + changes; 107 + commit_range; 108 + contributors; 109 + repo_url; 110 + } 103 111 in 104 112 (* Default hour and timestamp for backwards compat when reading old files *) 105 113 let default_hour = 0 in 106 114 let default_timestamp = Ptime.epoch in 107 115 Jsont.Object.map ~kind:"daily_entry" make 108 116 |> Jsont.Object.mem "date" Jsont.string ~enc:(fun (e : daily_entry) -> 
e.date) 109 - |> Jsont.Object.mem "hour" Jsont.int ~dec_absent:default_hour ~enc:(fun (e : daily_entry) -> e.hour) 110 - |> Jsont.Object.mem "timestamp" ptime_jsont ~dec_absent:default_timestamp ~enc:(fun (e : daily_entry) -> e.timestamp) 111 - |> Jsont.Object.mem "summary" Jsont.string ~enc:(fun (e : daily_entry) -> e.summary) 112 - |> Jsont.Object.mem "changes" (Jsont.list Jsont.string) ~enc:(fun (e : daily_entry) -> e.changes) 113 - |> Jsont.Object.mem "commit_range" commit_range_jsont ~enc:(fun (e : daily_entry) -> e.commit_range) 114 - |> Jsont.Object.mem "contributors" (Jsont.list Jsont.string) ~dec_absent:[] ~enc:(fun (e : daily_entry) -> e.contributors) 115 - |> Jsont.Object.mem "repo_url" (Jsont.option Jsont.string) ~dec_absent:None ~enc:(fun (e : daily_entry) -> e.repo_url) 117 + |> Jsont.Object.mem "hour" Jsont.int ~dec_absent:default_hour 118 + ~enc:(fun (e : daily_entry) -> e.hour) 119 + |> Jsont.Object.mem "timestamp" ptime_jsont ~dec_absent:default_timestamp 120 + ~enc:(fun (e : daily_entry) -> e.timestamp) 121 + |> Jsont.Object.mem "summary" Jsont.string ~enc:(fun (e : daily_entry) -> 122 + e.summary) 123 + |> Jsont.Object.mem "changes" (Jsont.list Jsont.string) 124 + ~enc:(fun (e : daily_entry) -> e.changes) 125 + |> Jsont.Object.mem "commit_range" commit_range_jsont 126 + ~enc:(fun (e : daily_entry) -> e.commit_range) 127 + |> Jsont.Object.mem "contributors" (Jsont.list Jsont.string) ~dec_absent:[] 128 + ~enc:(fun (e : daily_entry) -> e.contributors) 129 + |> Jsont.Object.mem "repo_url" (Jsont.option Jsont.string) ~dec_absent:None 130 + ~enc:(fun (e : daily_entry) -> e.repo_url) 116 131 |> Jsont.Object.finish 117 132 118 133 let daily_changes_file_jsont : daily_changes_file Jsont.t = 119 134 let make repository entries : daily_changes_file = { repository; entries } in 120 135 Jsont.Object.map ~kind:"daily_changes_file" make 121 - |> Jsont.Object.mem "repository" Jsont.string ~enc:(fun (f : daily_changes_file) -> f.repository) 122 - |> 
Jsont.Object.mem "entries" (Jsont.list daily_entry_jsont) ~enc:(fun (f : daily_changes_file) -> f.entries) 136 + |> Jsont.Object.mem "repository" Jsont.string 137 + ~enc:(fun (f : daily_changes_file) -> f.repository) 138 + |> Jsont.Object.mem "entries" (Jsont.list daily_entry_jsont) 139 + ~enc:(fun (f : daily_changes_file) -> f.entries) 123 140 |> Jsont.Object.finish 124 141 125 142 (* File I/O *) ··· 134 151 135 152 (* Load weekly changes from .changes/<repo>.json in monorepo *) 136 153 let load ~fs ~monorepo repo_name = 137 - let file_path = Eio.Path.(fs / Fpath.to_string monorepo / ".changes" / (repo_name ^ ".json")) in 154 + let file_path = 155 + Eio.Path.( 156 + fs / Fpath.to_string monorepo / ".changes" / (repo_name ^ ".json")) 157 + in 138 158 match Eio.Path.kind ~follow:true file_path with 139 159 | `Regular_file -> ( 140 160 let content = Eio.Path.load file_path in 141 161 match Jsont_bytesrw.decode_string changes_file_jsont content with 142 162 | Ok cf -> Ok cf 143 - | Error e -> Error (Format.sprintf "Failed to parse %s.json: %s" repo_name e)) 163 + | Error e -> 164 + Error (Format.sprintf "Failed to parse %s.json: %s" repo_name e)) 144 165 | _ -> Ok { repository = repo_name; entries = [] } 145 166 | exception Eio.Io _ -> Ok { repository = repo_name; entries = [] } 146 167 147 168 (* Save weekly changes to .changes/<repo>.json in monorepo *) 148 169 let save ~fs ~monorepo (cf : changes_file) = 149 170 ensure_changes_dir ~fs monorepo; 150 - let file_path = Eio.Path.(fs / Fpath.to_string monorepo / ".changes" / (cf.repository ^ ".json")) in 151 - match Jsont_bytesrw.encode_string ~format:Jsont.Indent changes_file_jsont cf with 171 + let file_path = 172 + Eio.Path.( 173 + fs / Fpath.to_string monorepo / ".changes" / (cf.repository ^ ".json")) 174 + in 175 + match 176 + Jsont_bytesrw.encode_string ~format:Jsont.Indent changes_file_jsont cf 177 + with 152 178 | Ok content -> 153 179 Eio.Path.save ~create:(`Or_truncate 0o644) file_path content; 154 180 Ok () 
155 - | Error e -> Error (Format.sprintf "Failed to encode %s.json: %s" cf.repository e) 181 + | Error e -> 182 + Error (Format.sprintf "Failed to encode %s.json: %s" cf.repository e) 156 183 157 184 (* Filename for daily changes: <repo>-<YYYY-MM-DD>.json *) 158 - let daily_filename repo_name date = 159 - repo_name ^ "-" ^ date ^ ".json" 185 + let daily_filename repo_name date = repo_name ^ "-" ^ date ^ ".json" 160 186 161 187 (* Check if daily file exists on disk *) 162 188 let daily_exists ~fs ~monorepo ~date repo_name = 163 189 let filename = daily_filename repo_name date in 164 - let file_path = Eio.Path.(fs / Fpath.to_string monorepo / ".changes" / filename) in 190 + let file_path = 191 + Eio.Path.(fs / Fpath.to_string monorepo / ".changes" / filename) 192 + in 165 193 match Eio.Path.kind ~follow:true file_path with 166 194 | `Regular_file -> true 167 195 | _ -> false ··· 170 198 (* Load daily changes from .changes/<repo>-<date>.json in monorepo *) 171 199 let load_daily ~fs ~monorepo ~date repo_name = 172 200 let filename = daily_filename repo_name date in 173 - let file_path = Eio.Path.(fs / Fpath.to_string monorepo / ".changes" / filename) in 201 + let file_path = 202 + Eio.Path.(fs / Fpath.to_string monorepo / ".changes" / filename) 203 + in 174 204 match Eio.Path.kind ~follow:true file_path with 175 205 | `Regular_file -> ( 176 206 let content = Eio.Path.load file_path in ··· 184 214 let save_daily ~fs ~monorepo ~date (cf : daily_changes_file) = 185 215 ensure_changes_dir ~fs monorepo; 186 216 let filename = daily_filename cf.repository date in 187 - let file_path = Eio.Path.(fs / Fpath.to_string monorepo / ".changes" / filename) in 188 - match Jsont_bytesrw.encode_string ~format:Jsont.Indent daily_changes_file_jsont cf with 217 + let file_path = 218 + Eio.Path.(fs / Fpath.to_string monorepo / ".changes" / filename) 219 + in 220 + match 221 + Jsont_bytesrw.encode_string ~format:Jsont.Indent daily_changes_file_jsont cf 222 + with 189 223 | Ok content -> 
190 224 Eio.Path.save ~create:(`Or_truncate 0o644) file_path content; 191 225 Ok () ··· 196 230 let to_markdown (cf : changes_file) = 197 231 let buf = Buffer.create 1024 in 198 232 Buffer.add_string buf (Printf.sprintf "# %s Changelog\n\n" cf.repository); 199 - List.iter (fun (entry : weekly_entry) -> 200 - Buffer.add_string buf (Printf.sprintf "## Week of %s to %s\n\n" entry.week_start entry.week_end); 201 - Buffer.add_string buf (Printf.sprintf "%s\n\n" entry.summary); 202 - List.iter (fun change -> 203 - Buffer.add_string buf (Printf.sprintf "- %s\n" change)) 204 - entry.changes; 205 - Buffer.add_string buf "\n") 233 + List.iter 234 + (fun (entry : weekly_entry) -> 235 + Buffer.add_string buf 236 + (Printf.sprintf "## Week of %s to %s\n\n" entry.week_start 237 + entry.week_end); 238 + Buffer.add_string buf (Printf.sprintf "%s\n\n" entry.summary); 239 + List.iter 240 + (fun change -> Buffer.add_string buf (Printf.sprintf "- %s\n" change)) 241 + entry.changes; 242 + Buffer.add_string buf "\n") 206 243 cf.entries; 207 244 Buffer.contents buf 208 245 209 246 let aggregate ~history (cfs : changes_file list) = 210 247 (* Collect all entries from all files, tagged with repository *) 211 248 let all_entries = 212 - List.concat_map (fun (cf : changes_file) -> 213 - List.map (fun (e : weekly_entry) -> (cf.repository, e)) cf.entries) 249 + List.concat_map 250 + (fun (cf : changes_file) -> 251 + List.map (fun (e : weekly_entry) -> (cf.repository, e)) cf.entries) 214 252 cfs 215 253 in 216 254 (* Sort by week_start descending *) 217 - let sorted = List.sort (fun (_, (e1 : weekly_entry)) (_, (e2 : weekly_entry)) -> 218 - String.compare e2.week_start e1.week_start) all_entries 255 + let sorted = 256 + List.sort 257 + (fun (_, (e1 : weekly_entry)) (_, (e2 : weekly_entry)) -> 258 + String.compare e2.week_start e1.week_start) 259 + all_entries 219 260 in 220 261 (* Group by week *) 221 262 let rec group_by_week acc current_week current_group = function 222 263 | [] -> 223 - if 
current_group <> [] then (current_week, List.rev current_group) :: acc 264 + if current_group <> [] then 265 + (current_week, List.rev current_group) :: acc 224 266 else acc 225 267 | (repo, (entry : weekly_entry)) :: rest -> 226 268 let week_key = entry.week_start ^ " to " ^ entry.week_end in ··· 230 272 group_by_week 231 273 ((current_week, List.rev current_group) :: acc) 232 274 week_key 233 - [(repo, entry)] 275 + [ (repo, entry) ] 234 276 rest 235 277 in 236 278 let grouped = List.rev (group_by_week [] "" [] sorted) in 237 279 (* Take only the requested number of weeks *) 238 280 let limited = 239 - if history > 0 then 240 - List.filteri (fun i _ -> i < history) grouped 281 + if history > 0 then List.filteri (fun i _ -> i < history) grouped 241 282 else grouped 242 283 in 243 284 (* Generate markdown *) 244 285 let buf = Buffer.create 4096 in 245 286 Buffer.add_string buf "# Changelog\n\n"; 246 - List.iter (fun (week_key, entries) -> 247 - Buffer.add_string buf (Printf.sprintf "## Week of %s\n\n" week_key); 248 - List.iter (fun (repo, (entry : weekly_entry)) -> 249 - Buffer.add_string buf (Printf.sprintf "### %s\n" repo); 250 - Buffer.add_string buf (Printf.sprintf "%s\n" entry.summary); 251 - List.iter (fun change -> 252 - Buffer.add_string buf (Printf.sprintf "- %s\n" change)) 253 - entry.changes; 254 - Buffer.add_string buf "\n") 255 - entries) 287 + List.iter 288 + (fun (week_key, entries) -> 289 + Buffer.add_string buf (Printf.sprintf "## Week of %s\n\n" week_key); 290 + List.iter 291 + (fun (repo, (entry : weekly_entry)) -> 292 + Buffer.add_string buf (Printf.sprintf "### %s\n" repo); 293 + Buffer.add_string buf (Printf.sprintf "%s\n" entry.summary); 294 + List.iter 295 + (fun change -> 296 + Buffer.add_string buf (Printf.sprintf "- %s\n" change)) 297 + entry.changes; 298 + Buffer.add_string buf "\n") 299 + entries) 256 300 limited; 257 301 Buffer.contents buf 258 302 ··· 266 310 let q = day in 267 311 let k = y mod 100 in 268 312 let j = y / 100 in 269 
- let h = (q + (13 * (m + 1)) / 5 + k + k / 4 + j / 4 - 2 * j) mod 7 in 313 + let h = (q + (13 * (m + 1) / 5) + k + (k / 4) + (j / 4) - (2 * j)) mod 7 in 270 314 (* Convert from Zeller's (0=Sat) to standard (0=Sun) *) 271 - ((h + 6) mod 7) 315 + (h + 6) mod 7 272 316 273 317 let add_days (y, m, d) n = 274 318 (* Simple day addition - handles month/year boundaries *) ··· 276 320 match month with 277 321 | 1 | 3 | 5 | 7 | 8 | 10 | 12 -> 31 278 322 | 4 | 6 | 9 | 11 -> 30 279 - | 2 -> if (year mod 4 = 0 && year mod 100 <> 0) || year mod 400 = 0 then 29 else 28 323 + | 2 -> 324 + if (year mod 4 = 0 && year mod 100 <> 0) || year mod 400 = 0 then 29 325 + else 28 280 326 | _ -> 30 281 327 in 282 328 let rec loop y m d n = ··· 289 335 let new_m = if m = 12 then 1 else m + 1 in 290 336 let new_y = if m = 12 then y + 1 else y in 291 337 loop new_y new_m 1 (n - remaining - 1) 292 - else (* n < 0 *) 293 - if d + n >= 1 then (y, m, d + n) 294 - else 295 - let new_m = if m = 1 then 12 else m - 1 in 296 - let new_y = if m = 1 then y - 1 else y in 297 - let dim = days_in_month new_y new_m in 298 - loop new_y new_m dim (n + d) 338 + else if 339 + (* n < 0 *) 340 + d + n >= 1 341 + then (y, m, d + n) 342 + else 343 + let new_m = if m = 1 then 12 else m - 1 in 344 + let new_y = if m = 1 then y - 1 else y in 345 + let dim = days_in_month new_y new_m in 346 + loop new_y new_m dim (n + d) 299 347 in 300 348 loop y m d n 301 349 302 - let format_date (y, m, d) = 303 - Printf.sprintf "%04d-%02d-%02d" y m d 350 + let format_date (y, m, d) = Printf.sprintf "%04d-%02d-%02d" y m d 304 351 305 352 let week_of_date (y, m, d) = 306 353 let dow = day_of_week y m d in ··· 330 377 let aggregate_daily ~history (cfs : daily_changes_file list) = 331 378 (* Collect all entries from all files, tagged with repository *) 332 379 let all_entries = 333 - List.concat_map (fun (cf : daily_changes_file) -> 334 - List.map (fun (e : daily_entry) -> (cf.repository, e)) cf.entries) 380 + List.concat_map 381 + (fun 
(cf : daily_changes_file) -> 382 + List.map (fun (e : daily_entry) -> (cf.repository, e)) cf.entries) 335 383 cfs 336 384 in 337 385 (* Sort by date descending *) 338 - let sorted = List.sort (fun (_, (e1 : daily_entry)) (_, (e2 : daily_entry)) -> 339 - String.compare e2.date e1.date) all_entries 386 + let sorted = 387 + List.sort 388 + (fun (_, (e1 : daily_entry)) (_, (e2 : daily_entry)) -> 389 + String.compare e2.date e1.date) 390 + all_entries 340 391 in 341 392 (* Group by date *) 342 393 let rec group_by_date acc current_date current_group = function 343 394 | [] -> 344 - if current_group <> [] then (current_date, List.rev current_group) :: acc 395 + if current_group <> [] then 396 + (current_date, List.rev current_group) :: acc 345 397 else acc 346 398 | (repo, (entry : daily_entry)) :: rest -> 347 399 if current_date = "" || current_date = entry.date then ··· 350 402 group_by_date 351 403 ((current_date, List.rev current_group) :: acc) 352 404 entry.date 353 - [(repo, entry)] 405 + [ (repo, entry) ] 354 406 rest 355 407 in 356 408 let grouped = List.rev (group_by_date [] "" [] sorted) in 357 409 (* Take only the requested number of days *) 358 410 let limited = 359 - if history > 0 then 360 - List.filteri (fun i _ -> i < history) grouped 411 + if history > 0 then List.filteri (fun i _ -> i < history) grouped 361 412 else grouped 362 413 in 363 414 (* Generate markdown - only include repos with actual changes *) 364 415 let buf = Buffer.create 4096 in 365 416 Buffer.add_string buf "# Daily Changelog\n\n"; 366 - List.iter (fun (date, entries) -> 367 - (* Filter out entries with empty changes - these are repos with no changes *) 368 - let entries_with_changes = List.filter (fun (_, (entry : daily_entry)) -> 369 - entry.changes <> []) entries 370 - in 371 - if entries_with_changes <> [] then begin 372 - Buffer.add_string buf (Printf.sprintf "## %s\n\n" date); 373 - List.iter (fun (repo, (entry : daily_entry)) -> 374 - (* Format repo name with link if URL 
available *) 375 - let repo_header = match entry.repo_url with 376 - | Some url -> Printf.sprintf "[%s](%s)" repo url 377 - | None -> repo 378 - in 379 - Buffer.add_string buf (Printf.sprintf "### %s\n\n" repo_header); 380 - Buffer.add_string buf (Printf.sprintf "%s\n\n" entry.summary); 381 - List.iter (fun change -> 382 - Buffer.add_string buf (Printf.sprintf "- %s\n" change)) 383 - entry.changes; 384 - (* Add contributors if any *) 385 - if entry.contributors <> [] then begin 386 - let contributors_str = String.concat ", " entry.contributors in 387 - Buffer.add_string buf (Printf.sprintf "\n*Contributors: %s*\n" contributors_str) 388 - end; 389 - Buffer.add_string buf "\n") 390 - entries_with_changes 391 - end) 417 + List.iter 418 + (fun (date, entries) -> 419 + (* Filter out entries with empty changes - these are repos with no changes *) 420 + let entries_with_changes = 421 + List.filter 422 + (fun (_, (entry : daily_entry)) -> entry.changes <> []) 423 + entries 424 + in 425 + if entries_with_changes <> [] then begin 426 + Buffer.add_string buf (Printf.sprintf "## %s\n\n" date); 427 + List.iter 428 + (fun (repo, (entry : daily_entry)) -> 429 + (* Format repo name with link if URL available *) 430 + let repo_header = 431 + match entry.repo_url with 432 + | Some url -> Printf.sprintf "[%s](%s)" repo url 433 + | None -> repo 434 + in 435 + Buffer.add_string buf (Printf.sprintf "### %s\n\n" repo_header); 436 + Buffer.add_string buf (Printf.sprintf "%s\n\n" entry.summary); 437 + List.iter 438 + (fun change -> 439 + Buffer.add_string buf (Printf.sprintf "- %s\n" change)) 440 + entry.changes; 441 + (* Add contributors if any *) 442 + if entry.contributors <> [] then begin 443 + let contributors_str = String.concat ", " entry.contributors in 444 + Buffer.add_string buf 445 + (Printf.sprintf "\n*Contributors: %s*\n" contributors_str) 446 + end; 447 + Buffer.add_string buf "\n") 448 + entries_with_changes 449 + end) 392 450 limited; 393 451 Buffer.contents buf 394 452 ··· 
396 454 397 455 let generate_weekly_prompt ~repository ~week_start ~week_end commits = 398 456 let buf = Buffer.create 4096 in 399 - Buffer.add_string buf (Printf.sprintf 400 - "You are analyzing git commits for the OCaml library \"%s\".\n" repository); 401 - Buffer.add_string buf (Printf.sprintf 402 - "Generate a user-facing changelog entry for the week of %s to %s.\n\n" 403 - week_start week_end); 457 + Buffer.add_string buf 458 + (Printf.sprintf 459 + "You are analyzing git commits for the OCaml library \"%s\".\n" 460 + repository); 461 + Buffer.add_string buf 462 + (Printf.sprintf 463 + "Generate a user-facing changelog entry for the week of %s to %s.\n\n" 464 + week_start week_end); 404 465 Buffer.add_string buf "## Commits this week:\n\n"; 405 - List.iter (fun (commit : Git.log_entry) -> 406 - Buffer.add_string buf (Printf.sprintf "### %s by %s (%s)\n" 407 - (String.sub commit.hash 0 (min 7 (String.length commit.hash))) 408 - commit.author commit.date); 409 - Buffer.add_string buf (Printf.sprintf "%s\n\n" commit.subject); 410 - if commit.body <> "" then begin 411 - Buffer.add_string buf (Printf.sprintf "%s\n" commit.body) 412 - end; 413 - Buffer.add_string buf "---\n\n") 466 + List.iter 467 + (fun (commit : Git.log_entry) -> 468 + Buffer.add_string buf 469 + (Printf.sprintf "### %s by %s (%s)\n" 470 + (String.sub commit.hash 0 (min 7 (String.length commit.hash))) 471 + commit.author commit.date); 472 + Buffer.add_string buf (Printf.sprintf "%s\n\n" commit.subject); 473 + if commit.body <> "" then begin 474 + Buffer.add_string buf (Printf.sprintf "%s\n" commit.body) 475 + end; 476 + Buffer.add_string buf "---\n\n") 414 477 commits; 415 - Buffer.add_string buf {|## Instructions: 478 + Buffer.add_string buf 479 + {|## Instructions: 416 480 417 481 1. Focus on USER-FACING changes only. 
Skip: 418 482 - Internal refactoring with no API impact ··· 445 509 446 510 let generate_daily_prompt ~repository ~date commits = 447 511 let buf = Buffer.create 4096 in 448 - Buffer.add_string buf (Printf.sprintf 449 - "You are analyzing git commits for the OCaml library \"%s\".\n" repository); 450 - Buffer.add_string buf (Printf.sprintf 451 - "Generate a user-facing changelog entry for %s.\n\n" date); 512 + Buffer.add_string buf 513 + (Printf.sprintf 514 + "You are analyzing git commits for the OCaml library \"%s\".\n" 515 + repository); 516 + Buffer.add_string buf 517 + (Printf.sprintf "Generate a user-facing changelog entry for %s.\n\n" date); 452 518 Buffer.add_string buf "## Commits today:\n\n"; 453 - List.iter (fun (commit : Git.log_entry) -> 454 - Buffer.add_string buf (Printf.sprintf "### %s by %s (%s)\n" 455 - (String.sub commit.hash 0 (min 7 (String.length commit.hash))) 456 - commit.author commit.date); 457 - Buffer.add_string buf (Printf.sprintf "%s\n\n" commit.subject); 458 - if commit.body <> "" then begin 459 - Buffer.add_string buf (Printf.sprintf "%s\n" commit.body) 460 - end; 461 - Buffer.add_string buf "---\n\n") 519 + List.iter 520 + (fun (commit : Git.log_entry) -> 521 + Buffer.add_string buf 522 + (Printf.sprintf "### %s by %s (%s)\n" 523 + (String.sub commit.hash 0 (min 7 (String.length commit.hash))) 524 + commit.author commit.date); 525 + Buffer.add_string buf (Printf.sprintf "%s\n\n" commit.subject); 526 + if commit.body <> "" then begin 527 + Buffer.add_string buf (Printf.sprintf "%s\n" commit.body) 528 + end; 529 + Buffer.add_string buf "---\n\n") 462 530 commits; 463 - Buffer.add_string buf {|## Instructions: 531 + Buffer.add_string buf 532 + {|## Instructions: 464 533 465 534 1. Focus on USER-FACING changes only. 
Skip: 466 535 - Internal refactoring with no API impact ··· 496 565 497 566 (* Response parsing *) 498 567 499 - type claude_response = { 500 - summary : string; 501 - changes : string list; 502 - } 568 + type claude_response = { summary : string; changes : string list } 503 569 504 570 let claude_response_jsont = 505 571 let make summary changes = { summary; changes } in 506 572 Jsont.Object.map ~kind:"claude_response" make 507 573 |> Jsont.Object.mem "summary" Jsont.string ~enc:(fun r -> r.summary) 508 - |> Jsont.Object.mem "changes" (Jsont.list Jsont.string) ~enc:(fun r -> r.changes) 574 + |> Jsont.Object.mem "changes" (Jsont.list Jsont.string) ~enc:(fun r -> 575 + r.changes) 509 576 |> Jsont.Object.finish 510 577 511 578 let parse_claude_response text = ··· 516 583 match Jsont_bytesrw.decode_string claude_response_jsont text with 517 584 | Ok r -> 518 585 (* Treat empty summary and changes as no changes *) 519 - if r.summary = "" && r.changes = [] then Ok None 520 - else Ok (Some r) 586 + if r.summary = "" && r.changes = [] then Ok None else Ok (Some r) 521 587 | Error e -> Error (Format.sprintf "Failed to parse Claude response: %s" e) 522 588 523 589 (* Main analysis function *) 524 590 525 - let analyze_commits 526 - ~sw 527 - ~process_mgr 528 - ~clock 529 - ~repository 530 - ~week_start 531 - ~week_end 591 + let analyze_commits ~sw ~process_mgr ~clock ~repository ~week_start ~week_end 532 592 commits = 533 593 if commits = [] then Ok None 534 594 else begin ··· 537 597 (* Create Claude options with structured output *) 538 598 let output_schema = 539 599 let open Jsont in 540 - Object ([ 541 - (("type", Meta.none), String ("object", Meta.none)); 542 - (("properties", Meta.none), Object ([ 543 - (("summary", Meta.none), Object ([ 544 - (("type", Meta.none), String ("string", Meta.none)); 545 - ], Meta.none)); 546 - (("changes", Meta.none), Object ([ 547 - (("type", Meta.none), String ("array", Meta.none)); 548 - (("items", Meta.none), Object ([ 549 - 
(("type", Meta.none), String ("string", Meta.none)); 550 - ], Meta.none)); 551 - ], Meta.none)); 552 - ], Meta.none)); 553 - (("required", Meta.none), Array ([ 554 - String ("summary", Meta.none); 555 - String ("changes", Meta.none); 556 - ], Meta.none)); 557 - ], Meta.none) 600 + Object 601 + ( [ 602 + (("type", Meta.none), String ("object", Meta.none)); 603 + ( ("properties", Meta.none), 604 + Object 605 + ( [ 606 + ( ("summary", Meta.none), 607 + Object 608 + ( [ (("type", Meta.none), String ("string", Meta.none)) ], 609 + Meta.none ) ); 610 + ( ("changes", Meta.none), 611 + Object 612 + ( [ 613 + (("type", Meta.none), String ("array", Meta.none)); 614 + ( ("items", Meta.none), 615 + Object 616 + ( [ 617 + ( ("type", Meta.none), 618 + String ("string", Meta.none) ); 619 + ], 620 + Meta.none ) ); 621 + ], 622 + Meta.none ) ); 623 + ], 624 + Meta.none ) ); 625 + ( ("required", Meta.none), 626 + Array 627 + ( [ 628 + String ("summary", Meta.none); String ("changes", Meta.none); 629 + ], 630 + Meta.none ) ); 631 + ], 632 + Meta.none ) 558 633 in 559 - let output_format = Claude.Proto.Structured_output.of_json_schema output_schema in 634 + let output_format = 635 + Claude.Proto.Structured_output.of_json_schema output_schema 636 + in 560 637 let options = 561 638 Claude.Options.default 562 639 |> Claude.Options.with_output_format output_format ··· 568 645 569 646 let responses = Claude.Client.receive_all client in 570 647 let result = ref None in 571 - List.iter (function 572 - | Claude.Response.Complete c -> ( 573 - match Claude.Response.Complete.structured_output c with 574 - | Some json -> ( 575 - match Jsont.Json.decode claude_response_jsont json with 576 - | Ok r -> result := Some (Ok (Some r)) 577 - | Error e -> 578 - result := Some (Error (Format.sprintf "Failed to decode response: %s" e))) 579 - | None -> 580 - (* Try to get text and parse it as fallback *) 581 - match Claude.Response.Complete.result_text c with 582 - | Some text -> result := Some 
(parse_claude_response text) 583 - | None -> result := Some (Ok None)) 584 - | Claude.Response.Text t -> 585 - let text = Claude.Response.Text.content t in 586 - if String.trim text = "NO_CHANGES" then 587 - result := Some (Ok None) 588 - | Claude.Response.Error e -> 589 - result := Some (Error (Printf.sprintf "Claude error: %s" (Claude.Response.Error.message e))) 590 - | _ -> ()) 648 + List.iter 649 + (function 650 + | Claude.Response.Complete c -> ( 651 + match Claude.Response.Complete.structured_output c with 652 + | Some json -> ( 653 + match Jsont.Json.decode claude_response_jsont json with 654 + | Ok r -> result := Some (Ok (Some r)) 655 + | Error e -> 656 + result := 657 + Some 658 + (Error 659 + (Format.sprintf "Failed to decode response: %s" e))) 660 + | None -> ( 661 + (* Try to get text and parse it as fallback *) 662 + match Claude.Response.Complete.result_text c with 663 + | Some text -> result := Some (parse_claude_response text) 664 + | None -> result := Some (Ok None))) 665 + | Claude.Response.Text t -> 666 + let text = Claude.Response.Text.content t in 667 + if String.trim text = "NO_CHANGES" then result := Some (Ok None) 668 + | Claude.Response.Error e -> 669 + result := 670 + Some 671 + (Error 672 + (Printf.sprintf "Claude error: %s" 673 + (Claude.Response.Error.message e))) 674 + | _ -> ()) 591 675 responses; 592 676 593 - match !result with 594 - | Some r -> r 595 - | None -> Ok None 677 + match !result with Some r -> r | None -> Ok None 596 678 end 597 679 598 680 (* Daily analysis function *) 599 - let analyze_commits_daily 600 - ~sw 601 - ~process_mgr 602 - ~clock 603 - ~repository 604 - ~date 605 - commits = 681 + let analyze_commits_daily ~sw ~process_mgr ~clock ~repository ~date commits = 606 682 if commits = [] then Ok None 607 683 else begin 608 684 let prompt = generate_daily_prompt ~repository ~date commits in ··· 610 686 (* Create Claude options with structured output *) 611 687 let output_schema = 612 688 let open Jsont in 613 - 
Object ([ 614 - (("type", Meta.none), String ("object", Meta.none)); 615 - (("properties", Meta.none), Object ([ 616 - (("summary", Meta.none), Object ([ 617 - (("type", Meta.none), String ("string", Meta.none)); 618 - ], Meta.none)); 619 - (("changes", Meta.none), Object ([ 620 - (("type", Meta.none), String ("array", Meta.none)); 621 - (("items", Meta.none), Object ([ 622 - (("type", Meta.none), String ("string", Meta.none)); 623 - ], Meta.none)); 624 - ], Meta.none)); 625 - ], Meta.none)); 626 - (("required", Meta.none), Array ([ 627 - String ("summary", Meta.none); 628 - String ("changes", Meta.none); 629 - ], Meta.none)); 630 - ], Meta.none) 689 + Object 690 + ( [ 691 + (("type", Meta.none), String ("object", Meta.none)); 692 + ( ("properties", Meta.none), 693 + Object 694 + ( [ 695 + ( ("summary", Meta.none), 696 + Object 697 + ( [ (("type", Meta.none), String ("string", Meta.none)) ], 698 + Meta.none ) ); 699 + ( ("changes", Meta.none), 700 + Object 701 + ( [ 702 + (("type", Meta.none), String ("array", Meta.none)); 703 + ( ("items", Meta.none), 704 + Object 705 + ( [ 706 + ( ("type", Meta.none), 707 + String ("string", Meta.none) ); 708 + ], 709 + Meta.none ) ); 710 + ], 711 + Meta.none ) ); 712 + ], 713 + Meta.none ) ); 714 + ( ("required", Meta.none), 715 + Array 716 + ( [ 717 + String ("summary", Meta.none); String ("changes", Meta.none); 718 + ], 719 + Meta.none ) ); 720 + ], 721 + Meta.none ) 722 + in 723 + let output_format = 724 + Claude.Proto.Structured_output.of_json_schema output_schema 631 725 in 632 - let output_format = Claude.Proto.Structured_output.of_json_schema output_schema in 633 726 let options = 634 727 Claude.Options.default 635 728 |> Claude.Options.with_output_format output_format ··· 641 734 642 735 let responses = Claude.Client.receive_all client in 643 736 let result = ref None in 644 - List.iter (function 645 - | Claude.Response.Complete c -> ( 646 - match Claude.Response.Complete.structured_output c with 647 - | Some json -> ( 
648 - match Jsont.Json.decode claude_response_jsont json with 649 - | Ok r -> 650 - (* Treat empty response as no changes *) 651 - if r.summary = "" && r.changes = [] then 652 - result := Some (Ok None) 653 - else 654 - result := Some (Ok (Some r)) 655 - | Error e -> 656 - result := Some (Error (Format.sprintf "Failed to decode response: %s" e))) 657 - | None -> 658 - (* Try to get text and parse it as fallback *) 659 - match Claude.Response.Complete.result_text c with 660 - | Some text -> result := Some (parse_claude_response text) 661 - | None -> result := Some (Ok None)) 662 - | Claude.Response.Text t -> 663 - let text = Claude.Response.Text.content t in 664 - if String.trim text = "NO_CHANGES" then 665 - result := Some (Ok None) 666 - | Claude.Response.Error e -> 667 - result := Some (Error (Printf.sprintf "Claude error: %s" (Claude.Response.Error.message e))) 668 - | _ -> ()) 737 + List.iter 738 + (function 739 + | Claude.Response.Complete c -> ( 740 + match Claude.Response.Complete.structured_output c with 741 + | Some json -> ( 742 + match Jsont.Json.decode claude_response_jsont json with 743 + | Ok r -> 744 + (* Treat empty response as no changes *) 745 + if r.summary = "" && r.changes = [] then 746 + result := Some (Ok None) 747 + else result := Some (Ok (Some r)) 748 + | Error e -> 749 + result := 750 + Some 751 + (Error 752 + (Format.sprintf "Failed to decode response: %s" e))) 753 + | None -> ( 754 + (* Try to get text and parse it as fallback *) 755 + match Claude.Response.Complete.result_text c with 756 + | Some text -> result := Some (parse_claude_response text) 757 + | None -> result := Some (Ok None))) 758 + | Claude.Response.Text t -> 759 + let text = Claude.Response.Text.content t in 760 + if String.trim text = "NO_CHANGES" then result := Some (Ok None) 761 + | Claude.Response.Error e -> 762 + result := 763 + Some 764 + (Error 765 + (Printf.sprintf "Claude error: %s" 766 + (Claude.Response.Error.message e))) 767 + | _ -> ()) 669 768 responses; 
670 769 671 - match !result with 672 - | Some r -> r 673 - | None -> Ok None 770 + match !result with Some r -> r | None -> Ok None 674 771 end 675 772 676 773 (* Refine daily changelog markdown to be more narrative *) 677 - let refine_daily_changelog 678 - ~sw 679 - ~process_mgr 680 - ~clock 681 - markdown = 682 - let prompt = Printf.sprintf {|You are editing a daily changelog for an OCaml monorepo. 774 + let refine_daily_changelog ~sw ~process_mgr ~clock markdown = 775 + let prompt = 776 + Printf.sprintf 777 + {|You are editing a daily changelog for an OCaml monorepo. 683 778 684 779 Your task is to refine the following changelog to be: 685 780 1. More narrative and human-readable - write it as a daily update that developers will want to read ··· 705 800 706 801 %s 707 802 708 - Output ONLY the refined markdown, no explanation or preamble.|} markdown 803 + Output ONLY the refined markdown, no explanation or preamble.|} 804 + markdown 709 805 in 710 806 711 - let options = 712 - Claude.Options.default 713 - |> Claude.Options.with_max_turns 1 714 - in 807 + let options = Claude.Options.default |> Claude.Options.with_max_turns 1 in 715 808 716 809 let client = Claude.Client.create ~sw ~process_mgr ~clock ~options () in 717 810 Claude.Client.query client prompt; 718 811 719 812 let responses = Claude.Client.receive_all client in 720 813 let result = ref None in 721 - List.iter (function 722 - | Claude.Response.Complete c -> ( 723 - match Claude.Response.Complete.result_text c with 724 - | Some text -> result := Some (Ok text) 725 - | None -> result := Some (Ok markdown)) (* fallback to original *) 726 - | Claude.Response.Error e -> 727 - result := Some (Error (Printf.sprintf "Claude error: %s" (Claude.Response.Error.message e))) 728 - | _ -> ()) 814 + List.iter 815 + (function 816 + | Claude.Response.Complete c -> ( 817 + match Claude.Response.Complete.result_text c with 818 + | Some text -> result := Some (Ok text) 819 + | None -> result := Some (Ok markdown) (* 
fallback to original *)) 820 + | Claude.Response.Error e -> 821 + result := 822 + Some 823 + (Error 824 + (Printf.sprintf "Claude error: %s" 825 + (Claude.Response.Error.message e))) 826 + | _ -> ()) 729 827 responses; 730 828 731 829 match !result with ··· 749 847 (* Infer change type from summary text *) 750 848 let infer_change_type summary = 751 849 let summary_lower = String.lowercase_ascii summary in 752 - if String.starts_with ~prefix:"initial import" summary_lower || 753 - String.starts_with ~prefix:"added as subtree" summary_lower || 754 - String.starts_with ~prefix:"added" summary_lower && String.ends_with ~suffix:"library" summary_lower then 755 - Changes_aggregated.New_library 756 - else if List.exists (fun kw -> string_contains_s summary_lower kw) 757 - ["fix"; "bugfix"; "bug fix"; "repair"; "patch"; "resolve"; "correct"] then 758 - Changes_aggregated.Bugfix 759 - else if List.exists (fun kw -> string_contains_s summary_lower kw) 760 - ["refactor"; "cleanup"; "clean up"; "reorganize"; "restructure"; "simplify"] then 761 - Changes_aggregated.Refactor 762 - else if List.exists (fun kw -> string_contains_s summary_lower kw) 763 - ["doc"; "documentation"; "readme"; "comment"; "tutorial"; "guide"] then 764 - Changes_aggregated.Documentation 765 - else if List.exists (fun kw -> string_contains_s summary_lower kw) 766 - ["add"; "new"; "feature"; "implement"; "support"; "introduce"; "enable"] then 767 - Changes_aggregated.Feature 768 - else 769 - Changes_aggregated.Unknown 850 + if 851 + String.starts_with ~prefix:"initial import" summary_lower 852 + || String.starts_with ~prefix:"added as subtree" summary_lower 853 + || String.starts_with ~prefix:"added" summary_lower 854 + && String.ends_with ~suffix:"library" summary_lower 855 + then Changes_aggregated.New_library 856 + else if 857 + List.exists 858 + (fun kw -> string_contains_s summary_lower kw) 859 + [ "fix"; "bugfix"; "bug fix"; "repair"; "patch"; "resolve"; "correct" ] 860 + then 
Changes_aggregated.Bugfix 861 + else if 862 + List.exists 863 + (fun kw -> string_contains_s summary_lower kw) 864 + [ 865 + "refactor"; 866 + "cleanup"; 867 + "clean up"; 868 + "reorganize"; 869 + "restructure"; 870 + "simplify"; 871 + ] 872 + then Changes_aggregated.Refactor 873 + else if 874 + List.exists 875 + (fun kw -> string_contains_s summary_lower kw) 876 + [ "doc"; "documentation"; "readme"; "comment"; "tutorial"; "guide" ] 877 + then Changes_aggregated.Documentation 878 + else if 879 + List.exists 880 + (fun kw -> string_contains_s summary_lower kw) 881 + [ "add"; "new"; "feature"; "implement"; "support"; "introduce"; "enable" ] 882 + then Changes_aggregated.Feature 883 + else Changes_aggregated.Unknown 770 884 771 - (** Generate an aggregated daily file from individual daily json files. 772 - This creates a YYYYMMDD.json file in the .changes directory. *) 885 + (** Generate an aggregated daily file from individual daily json files. This 886 + creates a YYYYMMDD.json file in the .changes directory. 
*) 773 887 let generate_aggregated ~fs ~monorepo ~date ~git_head ~now = 774 888 let changes_dir = Eio.Path.(fs / Fpath.to_string monorepo / ".changes") in 775 889 776 890 (* List all *-<date>.json files (new per-day format) *) 777 - let files = 778 - try Eio.Path.read_dir changes_dir 779 - with Eio.Io _ -> [] 780 - in 891 + let files = try Eio.Path.read_dir changes_dir with Eio.Io _ -> [] in 781 892 (* Match files like "<repo>-2026-01-19.json" for the given date *) 782 893 let date_suffix = "-" ^ date ^ ".json" in 783 894 let date_suffix_len = String.length date_suffix in 784 - let daily_files = List.filter (fun f -> 785 - String.ends_with ~suffix:date_suffix f && String.length f > date_suffix_len) files 895 + let daily_files = 896 + List.filter 897 + (fun f -> 898 + String.ends_with ~suffix:date_suffix f 899 + && String.length f > date_suffix_len) 900 + files 786 901 in 787 902 788 903 (* Load all daily files for this date and collect entries *) 789 - let entries = List.concat_map (fun filename -> 790 - (* Extract repo name: filename is "<repo>-<date>.json" *) 791 - let repo_name = String.sub filename 0 (String.length filename - date_suffix_len) in 792 - let path = Eio.Path.(changes_dir / filename) in 793 - try 794 - let content = Eio.Path.load path in 795 - match Jsont_bytesrw.decode_string daily_changes_file_jsont content with 796 - | Ok dcf -> 797 - List.filter_map (fun (e : daily_entry) -> 798 - if e.changes <> [] then 799 - Some (repo_name, e) 800 - else 801 - None) dcf.entries 802 - | Error _ -> [] 803 - with Eio.Io _ -> [] 804 - ) daily_files in 904 + let entries = 905 + List.concat_map 906 + (fun filename -> 907 + (* Extract repo name: filename is "<repo>-<date>.json" *) 908 + let repo_name = 909 + String.sub filename 0 (String.length filename - date_suffix_len) 910 + in 911 + let path = Eio.Path.(changes_dir / filename) in 912 + try 913 + let content = Eio.Path.load path in 914 + match 915 + Jsont_bytesrw.decode_string daily_changes_file_jsont content 916 
+ with 917 + | Ok dcf -> 918 + List.filter_map 919 + (fun (e : daily_entry) -> 920 + if e.changes <> [] then Some (repo_name, e) else None) 921 + dcf.entries 922 + | Error _ -> [] 923 + with Eio.Io _ -> []) 924 + daily_files 925 + in 805 926 806 927 (* Convert to aggregated format *) 807 - let agg_entries = List.map (fun (repo_name, (e : daily_entry)) -> 808 - let change_type = infer_change_type e.summary in 809 - Changes_aggregated.{ 810 - repository = repo_name; 811 - hour = e.hour; 812 - timestamp = e.timestamp; 813 - summary = e.summary; 814 - changes = e.changes; 815 - commit_range = { 816 - from_hash = e.commit_range.from_hash; 817 - to_hash = e.commit_range.to_hash; 818 - count = e.commit_range.count; 819 - }; 820 - contributors = e.contributors; 821 - repo_url = e.repo_url; 822 - change_type; 823 - }) entries 928 + let agg_entries = 929 + List.map 930 + (fun (repo_name, (e : daily_entry)) -> 931 + let change_type = infer_change_type e.summary in 932 + Changes_aggregated. 933 + { 934 + repository = repo_name; 935 + hour = e.hour; 936 + timestamp = e.timestamp; 937 + summary = e.summary; 938 + changes = e.changes; 939 + commit_range = 940 + { 941 + from_hash = e.commit_range.from_hash; 942 + to_hash = e.commit_range.to_hash; 943 + count = e.commit_range.count; 944 + }; 945 + contributors = e.contributors; 946 + repo_url = e.repo_url; 947 + change_type; 948 + }) 949 + entries 824 950 in 825 951 826 952 (* Collect all unique authors *) ··· 831 957 in 832 958 833 959 (* Create the aggregated structure *) 834 - let aggregated : Changes_aggregated.t = { 835 - date; 836 - generated_at = now; 837 - git_head; 838 - entries = agg_entries; 839 - authors; 840 - } in 960 + let aggregated : Changes_aggregated.t = 961 + { date; generated_at = now; git_head; entries = agg_entries; authors } 962 + in 841 963 842 964 (* Save to YYYYMMDD.json *) 843 965 let changes_dir_fpath = Fpath.(v (Fpath.to_string monorepo) / ".changes") in
+90 -78
lib/changes.mli
··· 1 1 (** Changelog generation for monopam. 2 2 3 - This module handles generating weekly and daily changelog entries using Claude AI 4 - to analyze git commit history and produce user-facing change summaries. 3 + This module handles generating weekly and daily changelog entries using 4 + Claude AI to analyze git commit history and produce user-facing change 5 + summaries. 5 6 6 7 Changes are stored in a .changes directory at the monorepo root: 7 8 - .changes/<repo_name>.json - weekly changelog entries 8 - - .changes/<repo_name>-<YYYY-MM-DD>.json - daily changelog entries (one file per day per repo) 9 + - .changes/<repo_name>-<YYYY-MM-DD>.json - daily changelog entries (one file 10 + per day per repo) 9 11 - .changes/YYYYMMDD.json - aggregated daily changes for broadcasting 10 12 11 13 {1 Submodules} 12 14 13 - These modules provide types and I/O for querying the generated changes files. *) 15 + These modules provide types and I/O for querying the generated changes 16 + files. *) 14 17 15 - (** Aggregated daily changes format (YYYYMMDD.json files). *) 16 18 module Aggregated = Changes_aggregated 19 + (** Aggregated daily changes format (YYYYMMDD.json files). *) 17 20 18 - (** Daily changes with per-day-per-repo structure (repo-YYYY-MM-DD.json files). *) 19 21 module Daily = Changes_daily 22 + (** Daily changes with per-day-per-repo structure (repo-YYYY-MM-DD.json files). 23 + *) 20 24 21 - (** High-level query interface for changes. *) 22 25 module Query = Changes_query 26 + (** High-level query interface for changes. *) 23 27 24 28 (** {1 Types} *) 25 29 26 - type commit_range = { 27 - from_hash : string; 28 - to_hash : string; 29 - count : int; 30 - } 30 + type commit_range = { from_hash : string; to_hash : string; count : int } 31 31 (** Range of commits included in a changelog entry. 
*) 32 32 33 33 type weekly_entry = { 34 34 week_start : string; (** ISO date YYYY-MM-DD, Monday *) 35 - week_end : string; (** ISO date YYYY-MM-DD, Sunday *) 36 - summary : string; (** One-line summary *) 37 - changes : string list; (** Bullet points *) 35 + week_end : string; (** ISO date YYYY-MM-DD, Sunday *) 36 + summary : string; (** One-line summary *) 37 + changes : string list; (** Bullet points *) 38 38 commit_range : commit_range; 39 39 } 40 40 (** A single week's changelog entry. *) 41 41 42 42 type daily_entry = { 43 - date : string; (** ISO date YYYY-MM-DD *) 44 - hour : int; (** Hour of day 0-23 for filtering *) 43 + date : string; (** ISO date YYYY-MM-DD *) 44 + hour : int; (** Hour of day 0-23 for filtering *) 45 45 timestamp : Ptime.t; (** RFC3339 timestamp for precise ordering *) 46 - summary : string; (** One-line summary *) 47 - changes : string list; (** Bullet points *) 46 + summary : string; (** One-line summary *) 47 + changes : string list; (** Bullet points *) 48 48 commit_range : commit_range; 49 - contributors : string list; (** List of contributors for this entry *) 50 - repo_url : string option; (** Upstream repository URL *) 49 + contributors : string list; (** List of contributors for this entry *) 50 + repo_url : string option; (** Upstream repository URL *) 51 51 } 52 52 (** A single day's changelog entry with hour tracking for real-time updates. *) 53 53 54 - type changes_file = { 55 - repository : string; 56 - entries : weekly_entry list; 57 - } 54 + type changes_file = { repository : string; entries : weekly_entry list } 58 55 (** Contents of a weekly changes JSON file for a repository. *) 59 56 60 - type daily_changes_file = { 61 - repository : string; 62 - entries : daily_entry list; 63 - } 57 + type daily_changes_file = { repository : string; entries : daily_entry list } 64 58 (** Contents of a daily changes JSON file for a repository. *) 65 59 66 60 (** Mode for changelog generation. 
*) ··· 85 79 86 80 (** {1 File I/O} *) 87 81 88 - val load : fs:_ Eio.Path.t -> monorepo:Fpath.t -> string -> (changes_file, string) result 89 - (** [load ~fs ~monorepo repo_name] loads weekly changes from .changes/<repo_name>.json. 90 - Returns an empty changes file if the file does not exist. *) 82 + val load : 83 + fs:_ Eio.Path.t -> monorepo:Fpath.t -> string -> (changes_file, string) result 84 + (** [load ~fs ~monorepo repo_name] loads weekly changes from 85 + .changes/<repo_name>.json. Returns an empty changes file if the file does 86 + not exist. *) 91 87 92 - val save : fs:_ Eio.Path.t -> monorepo:Fpath.t -> changes_file -> (unit, string) result 93 - (** [save ~fs ~monorepo cf] saves the changes file to .changes/<repo_name>.json. *) 88 + val save : 89 + fs:_ Eio.Path.t -> monorepo:Fpath.t -> changes_file -> (unit, string) result 90 + (** [save ~fs ~monorepo cf] saves the changes file to .changes/<repo_name>.json. 91 + *) 94 92 95 - val daily_exists : fs:_ Eio.Path.t -> monorepo:Fpath.t -> date:string -> string -> bool 96 - (** [daily_exists ~fs ~monorepo ~date repo_name] checks if a daily changes file exists. 93 + val daily_exists : 94 + fs:_ Eio.Path.t -> monorepo:Fpath.t -> date:string -> string -> bool 95 + (** [daily_exists ~fs ~monorepo ~date repo_name] checks if a daily changes file 96 + exists. 97 97 @param date Date in YYYY-MM-DD format *) 98 98 99 - val load_daily : fs:_ Eio.Path.t -> monorepo:Fpath.t -> date:string -> string -> (daily_changes_file, string) result 100 - (** [load_daily ~fs ~monorepo ~date repo_name] loads daily changes from .changes/<repo_name>-<date>.json. 101 - Returns an empty changes file if the file does not exist. 99 + val load_daily : 100 + fs:_ Eio.Path.t -> 101 + monorepo:Fpath.t -> 102 + date:string -> 103 + string -> 104 + (daily_changes_file, string) result 105 + (** [load_daily ~fs ~monorepo ~date repo_name] loads daily changes from 106 + .changes/<repo_name>-<date>.json. 
Returns an empty changes file if the file 107 + does not exist. 102 108 @param date Date in YYYY-MM-DD format *) 103 109 104 - val save_daily : fs:_ Eio.Path.t -> monorepo:Fpath.t -> date:string -> daily_changes_file -> (unit, string) result 105 - (** [save_daily ~fs ~monorepo ~date cf] saves the changes file to .changes/<repo_name>-<date>.json. 110 + val save_daily : 111 + fs:_ Eio.Path.t -> 112 + monorepo:Fpath.t -> 113 + date:string -> 114 + daily_changes_file -> 115 + (unit, string) result 116 + (** [save_daily ~fs ~monorepo ~date cf] saves the changes file to 117 + .changes/<repo_name>-<date>.json. 106 118 @param date Date in YYYY-MM-DD format *) 107 119 108 120 (** {1 Markdown Generation} *) ··· 111 123 (** [to_markdown cf] generates markdown from a single weekly changes file. *) 112 124 113 125 val aggregate : history:int -> changes_file list -> string 114 - (** [aggregate ~history cfs] generates combined markdown from multiple weekly changes files. 126 + (** [aggregate ~history cfs] generates combined markdown from multiple weekly 127 + changes files. 115 128 @param history Number of weeks to include (0 for all) *) 116 129 117 130 val aggregate_daily : history:int -> daily_changes_file list -> string 118 - (** [aggregate_daily ~history cfs] generates combined markdown from multiple daily changes files. 119 - Only includes repos with actual changes (filters out empty entries). 131 + (** [aggregate_daily ~history cfs] generates combined markdown from multiple 132 + daily changes files. Only includes repos with actual changes (filters out 133 + empty entries). 120 134 @param history Number of days to include (0 for all) *) 121 135 122 136 (** {1 Date Calculation} *) ··· 125 139 (** [format_date (year, month, day)] formats a date as YYYY-MM-DD. *) 126 140 127 141 val week_of_date : int * int * int -> string * string 128 - (** [week_of_date (year, month, day)] returns (week_start, week_end) as ISO date strings. 129 - week_start is Monday, week_end is Sunday. 
*) 142 + (** [week_of_date (year, month, day)] returns (week_start, week_end) as ISO date 143 + strings. week_start is Monday, week_end is Sunday. *) 130 144 131 145 val week_of_ptime : Ptime.t -> string * string 132 146 (** [week_of_ptime t] returns (week_start, week_end) for the given timestamp. *) ··· 135 149 (** [date_of_ptime t] returns the date as YYYY-MM-DD for the given timestamp. *) 136 150 137 151 val has_week : changes_file -> week_start:string -> bool 138 - (** [has_week cf ~week_start] returns true if the changes file already has an entry 139 - for the week starting on the given date. *) 152 + (** [has_week cf ~week_start] returns true if the changes file already has an 153 + entry for the week starting on the given date. *) 140 154 141 155 val has_day : daily_changes_file -> date:string -> bool 142 - (** [has_day cf ~date] returns true if the daily changes file already has an entry 143 - for the given date. *) 156 + (** [has_day cf ~date] returns true if the daily changes file already has an 157 + entry for the given date. *) 144 158 145 159 (** {1 Claude Integration} *) 146 160 147 - type claude_response = { 148 - summary : string; 149 - changes : string list; 150 - } 161 + type claude_response = { summary : string; changes : string list } 151 162 (** Response from Claude analysis. *) 152 163 153 164 val generate_prompt : ··· 156 167 week_end:string -> 157 168 Git.log_entry list -> 158 169 string 159 - (** [generate_prompt ~repository ~week_start ~week_end commits] creates the prompt 160 - to send to Claude for weekly changelog generation. *) 170 + (** [generate_prompt ~repository ~week_start ~week_end commits] creates the 171 + prompt to send to Claude for weekly changelog generation. 
*) 161 172 162 173 val generate_weekly_prompt : 163 174 repository:string -> ··· 165 176 week_end:string -> 166 177 Git.log_entry list -> 167 178 string 168 - (** [generate_weekly_prompt ~repository ~week_start ~week_end commits] creates the prompt 169 - to send to Claude for weekly changelog generation. *) 179 + (** [generate_weekly_prompt ~repository ~week_start ~week_end commits] creates 180 + the prompt to send to Claude for weekly changelog generation. *) 170 181 171 182 val generate_daily_prompt : 172 - repository:string -> 173 - date:string -> 174 - Git.log_entry list -> 175 - string 176 - (** [generate_daily_prompt ~repository ~date commits] creates the prompt 177 - to send to Claude for daily changelog generation. *) 183 + repository:string -> date:string -> Git.log_entry list -> string 184 + (** [generate_daily_prompt ~repository ~date commits] creates the prompt to send 185 + to Claude for daily changelog generation. *) 178 186 179 187 val parse_claude_response : string -> (claude_response option, string) result 180 - (** [parse_claude_response text] parses Claude's response. 181 - Returns [Ok None] if the response is empty (blank summary and changes) or "NO_CHANGES". 182 - Returns [Ok (Some r)] if valid JSON was parsed with actual changes. 183 - Returns [Error msg] if parsing failed. *) 188 + (** [parse_claude_response text] parses Claude's response. Returns [Ok None] if 189 + the response is empty (blank summary and changes) or "NO_CHANGES". Returns 190 + [Ok (Some r)] if valid JSON was parsed with actual changes. Returns 191 + [Error msg] if parsing failed. *) 184 192 185 193 val analyze_commits : 186 194 sw:Eio.Switch.t -> ··· 191 199 week_end:string -> 192 200 Git.log_entry list -> 193 201 (claude_response option, string) result 194 - (** [analyze_commits ~sw ~process_mgr ~clock ~repository ~week_start ~week_end commits] 195 - sends commits to Claude for weekly analysis and returns the parsed response. 
*) 202 + (** [analyze_commits ~sw ~process_mgr ~clock ~repository ~week_start ~week_end 203 + commits] sends commits to Claude for weekly analysis and returns the parsed 204 + response. *) 196 205 197 206 val analyze_commits_daily : 198 207 sw:Eio.Switch.t -> ··· 203 212 Git.log_entry list -> 204 213 (claude_response option, string) result 205 214 (** [analyze_commits_daily ~sw ~process_mgr ~clock ~repository ~date commits] 206 - sends commits to Claude for daily analysis and returns the parsed response. *) 215 + sends commits to Claude for daily analysis and returns the parsed response. 216 + *) 207 217 208 218 val refine_daily_changelog : 209 219 sw:Eio.Switch.t -> ··· 213 223 (string, string) result 214 224 (** [refine_daily_changelog ~sw ~process_mgr ~clock markdown] sends the raw 215 225 daily changelog markdown through Claude to produce a more narrative, 216 - well-organized version. Groups related changes together and orders them 217 - by significance. Ensures all repository names are formatted as markdown 218 - links using the pattern [\[repo-name\](https://tangled.org/@anil.recoil.org/repo-name.git)]. 219 - Returns the refined markdown or the original on error. *) 226 + well-organized version. Groups related changes together and orders them by 227 + significance. Ensures all repository names are formatted as markdown links 228 + using the pattern 229 + [[repo-name](https://tangled.org/@anil.recoil.org/repo-name.git)]. Returns 230 + the refined markdown or the original on error. *) 220 231 221 232 (** {1 Aggregated Files} *) 222 233 ··· 227 238 git_head:string -> 228 239 now:Ptime.t -> 229 240 (unit, string) result 230 - (** [generate_aggregated ~fs ~monorepo ~date ~git_head ~now] generates an aggregated 231 - JSON file from all daily JSON files. 241 + (** [generate_aggregated ~fs ~monorepo ~date ~git_head ~now] generates an 242 + aggregated JSON file from all daily JSON files. 
232 243 233 244 This creates a .changes/YYYYMMDD.json file containing all repository entries 234 - for the specified date, with change type classification and author aggregation. 245 + for the specified date, with change type classification and author 246 + aggregation. 235 247 236 248 @param fs Filesystem path 237 249 @param monorepo Path to the monorepo root
+71 -46
lib/changes_aggregated.ml
··· 34 34 | New_library -> "new_library" 35 35 | Unknown -> "unknown" 36 36 37 - type commit_range = { 38 - from_hash : string; 39 - to_hash : string; 40 - count : int; 41 - } 37 + type commit_range = { from_hash : string; to_hash : string; count : int } 42 38 43 39 type entry = { 44 40 repository : string; ··· 63 59 (* JSON codecs *) 64 60 65 61 let change_type_jsont = 66 - Jsont.enum ~kind:"change_type" [ 67 - ("feature", Feature); 68 - ("bugfix", Bugfix); 69 - ("documentation", Documentation); 70 - ("refactor", Refactor); 71 - ("new_library", New_library); 72 - ("unknown", Unknown); 73 - ] 62 + Jsont.enum ~kind:"change_type" 63 + [ 64 + ("feature", Feature); 65 + ("bugfix", Bugfix); 66 + ("documentation", Documentation); 67 + ("refactor", Refactor); 68 + ("new_library", New_library); 69 + ("unknown", Unknown); 70 + ] 74 71 75 72 let commit_range_jsont = 76 73 let make from_hash to_hash count = { from_hash; to_hash; count } in ··· 81 78 |> Jsont.Object.finish 82 79 83 80 let ptime_jsont = 84 - let enc t = 85 - Ptime.to_rfc3339 t ~tz_offset_s:0 86 - in 81 + let enc t = Ptime.to_rfc3339 t ~tz_offset_s:0 in 87 82 let dec s = 88 83 match Ptime.of_rfc3339 s with 89 84 | Ok (t, _, _) -> t ··· 92 87 Jsont.map ~dec ~enc Jsont.string 93 88 94 89 let entry_jsont = 95 - let make repository hour timestamp summary changes commit_range contributors repo_url change_type = 96 - { repository; hour; timestamp; summary; changes; commit_range; contributors; repo_url; change_type } 90 + let make repository hour timestamp summary changes commit_range contributors 91 + repo_url change_type = 92 + { 93 + repository; 94 + hour; 95 + timestamp; 96 + summary; 97 + changes; 98 + commit_range; 99 + contributors; 100 + repo_url; 101 + change_type; 102 + } 97 103 in 98 104 (* Default hour and timestamp for backwards compat when reading old files *) 99 105 let default_hour = 0 in 100 106 let default_timestamp = Ptime.epoch in 101 107 Jsont.Object.map ~kind:"aggregated_entry" make 102 108 |> 
Jsont.Object.mem "repository" Jsont.string ~enc:(fun e -> e.repository) 103 - |> Jsont.Object.mem "hour" Jsont.int ~dec_absent:default_hour ~enc:(fun e -> e.hour) 104 - |> Jsont.Object.mem "timestamp" ptime_jsont ~dec_absent:default_timestamp ~enc:(fun e -> e.timestamp) 109 + |> Jsont.Object.mem "hour" Jsont.int ~dec_absent:default_hour ~enc:(fun e -> 110 + e.hour) 111 + |> Jsont.Object.mem "timestamp" ptime_jsont ~dec_absent:default_timestamp 112 + ~enc:(fun e -> e.timestamp) 105 113 |> Jsont.Object.mem "summary" Jsont.string ~enc:(fun e -> e.summary) 106 - |> Jsont.Object.mem "changes" (Jsont.list Jsont.string) ~enc:(fun e -> e.changes) 107 - |> Jsont.Object.mem "commit_range" commit_range_jsont ~enc:(fun e -> e.commit_range) 108 - |> Jsont.Object.mem "contributors" (Jsont.list Jsont.string) ~dec_absent:[] ~enc:(fun e -> e.contributors) 109 - |> Jsont.Object.mem "repo_url" (Jsont.option Jsont.string) ~dec_absent:None ~enc:(fun e -> e.repo_url) 110 - |> Jsont.Object.mem "change_type" change_type_jsont ~dec_absent:Unknown ~enc:(fun e -> e.change_type) 114 + |> Jsont.Object.mem "changes" (Jsont.list Jsont.string) ~enc:(fun e -> 115 + e.changes) 116 + |> Jsont.Object.mem "commit_range" commit_range_jsont ~enc:(fun e -> 117 + e.commit_range) 118 + |> Jsont.Object.mem "contributors" (Jsont.list Jsont.string) ~dec_absent:[] 119 + ~enc:(fun e -> e.contributors) 120 + |> Jsont.Object.mem "repo_url" (Jsont.option Jsont.string) ~dec_absent:None 121 + ~enc:(fun e -> e.repo_url) 122 + |> Jsont.Object.mem "change_type" change_type_jsont ~dec_absent:Unknown 123 + ~enc:(fun e -> e.change_type) 111 124 |> Jsont.Object.finish 112 125 113 126 let jsont = ··· 118 131 |> Jsont.Object.mem "date" Jsont.string ~enc:(fun t -> t.date) 119 132 |> Jsont.Object.mem "generated_at" ptime_jsont ~enc:(fun t -> t.generated_at) 120 133 |> Jsont.Object.mem "git_head" Jsont.string ~enc:(fun t -> t.git_head) 121 - |> Jsont.Object.mem "entries" (Jsont.list entry_jsont) ~enc:(fun t -> t.entries) 122 - 
|> Jsont.Object.mem "authors" (Jsont.list Jsont.string) ~dec_absent:[] ~enc:(fun t -> t.authors) 134 + |> Jsont.Object.mem "entries" (Jsont.list entry_jsont) ~enc:(fun t -> 135 + t.entries) 136 + |> Jsont.Object.mem "authors" (Jsont.list Jsont.string) ~dec_absent:[] 137 + ~enc:(fun t -> t.authors) 123 138 |> Jsont.Object.finish 124 139 125 140 (* File I/O *) ··· 137 152 let mm = String.sub yyyymmdd 4 2 in 138 153 let dd = String.sub yyyymmdd 6 2 in 139 154 Some (yyyy ^ "-" ^ mm ^ "-" ^ dd) 140 - else 141 - None 155 + else None 142 156 143 157 let load ~fs ~changes_dir ~date = 144 158 let filename = filename_of_date date in ··· 156 170 (* List all YYYYMMDD.json files and filter by range *) 157 171 let dir_path = Eio.Path.(fs / Fpath.to_string changes_dir) in 158 172 match Eio.Path.kind ~follow:true dir_path with 159 - | `Directory -> ( 173 + | `Directory -> 160 174 let entries = Eio.Path.read_dir dir_path in 161 - let json_files = List.filter (fun f -> 162 - String.length f = 13 && String.ends_with ~suffix:".json" f && 163 - not (String.contains f '-')) entries 175 + let json_files = 176 + List.filter 177 + (fun f -> 178 + String.length f = 13 179 + && String.ends_with ~suffix:".json" f 180 + && not (String.contains f '-')) 181 + entries 164 182 in 165 183 let sorted = List.sort String.compare json_files in 166 184 let from_file = filename_of_date from_date in 167 185 let to_file = filename_of_date to_date in 168 - let in_range = List.filter (fun f -> 169 - f >= from_file && f <= to_file) sorted 186 + let in_range = 187 + List.filter (fun f -> f >= from_file && f <= to_file) sorted 170 188 in 171 - let results = List.filter_map (fun filename -> 172 - match date_of_filename filename with 173 - | Some date -> ( 174 - match load ~fs ~changes_dir ~date with 175 - | Ok t -> Some t 176 - | Error _ -> None) 177 - | None -> None) in_range 189 + let results = 190 + List.filter_map 191 + (fun filename -> 192 + match date_of_filename filename with 193 + | Some date -> ( 194 + 
match load ~fs ~changes_dir ~date with 195 + | Ok t -> Some t 196 + | Error _ -> None) 197 + | None -> None) 198 + in_range 178 199 in 179 - Ok results) 200 + Ok results 180 201 | _ -> Error "Changes directory not found" 181 202 | exception Eio.Io _ -> Error "Could not read changes directory" 182 203 ··· 185 206 match Eio.Path.kind ~follow:true dir_path with 186 207 | `Directory -> ( 187 208 let entries = Eio.Path.read_dir dir_path in 188 - let json_files = List.filter (fun f -> 189 - String.length f = 13 && String.ends_with ~suffix:".json" f && 190 - not (String.contains f '-')) entries 209 + let json_files = 210 + List.filter 211 + (fun f -> 212 + String.length f = 13 213 + && String.ends_with ~suffix:".json" f 214 + && not (String.contains f '-')) 215 + entries 191 216 in 192 217 match List.sort (fun a b -> String.compare b a) json_files with 193 218 | [] -> Ok None
+30 -29
lib/changes_aggregated.mli
··· 14 14 15 15 (** Classification of changes for grouping in broadcasts. *) 16 16 type change_type = 17 - | Feature (** New features or capabilities *) 18 - | Bugfix (** Bug fixes *) 19 - | Documentation (** Documentation updates *) 20 - | Refactor (** Code refactoring *) 21 - | New_library (** Initial import of a new library *) 22 - | Unknown (** Unclassified changes *) 17 + | Feature (** New features or capabilities *) 18 + | Bugfix (** Bug fixes *) 19 + | Documentation (** Documentation updates *) 20 + | Refactor (** Code refactoring *) 21 + | New_library (** Initial import of a new library *) 22 + | Unknown (** Unclassified changes *) 23 23 24 24 val change_type_of_string : string -> change_type 25 25 val string_of_change_type : change_type -> string 26 26 27 27 (** {1 Entry Types} *) 28 28 29 - (** Commit range information. *) 30 29 type commit_range = { 31 30 from_hash : string; (** Starting commit hash *) 32 - to_hash : string; (** Ending commit hash *) 33 - count : int; (** Number of commits in range *) 31 + to_hash : string; (** Ending commit hash *) 32 + count : int; (** Number of commits in range *) 34 33 } 34 + (** Commit range information. *) 35 35 36 - (** A single repository's changes for the day. 
*) 37 36 type entry = { 38 - repository : string; (** Repository name *) 39 - hour : int; (** Hour of day 0-23 for filtering *) 40 - timestamp : Ptime.t; (** RFC3339 timestamp for precise ordering *) 41 - summary : string; (** One-line summary of changes *) 42 - changes : string list; (** List of change bullet points *) 37 + repository : string; (** Repository name *) 38 + hour : int; (** Hour of day 0-23 for filtering *) 39 + timestamp : Ptime.t; (** RFC3339 timestamp for precise ordering *) 40 + summary : string; (** One-line summary of changes *) 41 + changes : string list; (** List of change bullet points *) 43 42 commit_range : commit_range; (** Commits included *) 44 - contributors : string list; (** Contributors to these changes *) 45 - repo_url : string option; (** Optional repository URL *) 46 - change_type : change_type; (** Classification of the change *) 43 + contributors : string list; (** Contributors to these changes *) 44 + repo_url : string option; (** Optional repository URL *) 45 + change_type : change_type; (** Classification of the change *) 47 46 } 47 + (** A single repository's changes for the day. *) 48 48 49 49 (** {1 Aggregated File Type} *) 50 50 51 - (** The complete aggregated daily changes file. *) 52 51 type t = { 53 - date : string; (** ISO date YYYY-MM-DD *) 54 - generated_at : Ptime.t; (** When this file was generated *) 55 - git_head : string; (** Monorepo HEAD at generation time *) 56 - entries : entry list; (** All repository entries for this day *) 57 - authors : string list; (** All unique authors for this day *) 52 + date : string; (** ISO date YYYY-MM-DD *) 53 + generated_at : Ptime.t; (** When this file was generated *) 54 + git_head : string; (** Monorepo HEAD at generation time *) 55 + entries : entry list; (** All repository entries for this day *) 56 + authors : string list; (** All unique authors for this day *) 58 57 } 58 + (** The complete aggregated daily changes file. 
*) 59 59 60 60 (** {1 JSON Codecs} *) 61 61 ··· 64 64 65 65 (** {1 File I/O} *) 66 66 67 - val load : fs:_ Eio.Path.t -> changes_dir:Fpath.t -> date:string -> (t, string) result 68 - (** Load aggregated changes for a specific date. 69 - [date] should be in YYYY-MM-DD format. *) 67 + val load : 68 + fs:_ Eio.Path.t -> changes_dir:Fpath.t -> date:string -> (t, string) result 69 + (** Load aggregated changes for a specific date. [date] should be in YYYY-MM-DD 70 + format. *) 70 71 71 72 val load_range : 72 73 fs:_ Eio.Path.t -> ··· 74 75 from_date:string -> 75 76 to_date:string -> 76 77 (t list, string) result 77 - (** Load all aggregated changes files in date range. 78 - Dates should be in YYYY-MM-DD format. *) 78 + (** Load all aggregated changes files in date range. Dates should be in 79 + YYYY-MM-DD format. *) 79 80 80 81 val latest : fs:_ Eio.Path.t -> changes_dir:Fpath.t -> (t option, string) result 81 82 (** Load the most recent aggregated changes file. *)
+104 -74
lib/changes_daily.ml
··· 10 10 [<repo>-<YYYY-MM-DD>.json] and contain timestamped entries for real-time 11 11 tracking. *) 12 12 13 - type commit_range = { 14 - from_hash : string; 15 - to_hash : string; 16 - count : int; 17 - } 13 + type commit_range = { from_hash : string; to_hash : string; count : int } 18 14 19 15 type entry = { 20 16 repository : string; ··· 27 23 repo_url : string option; 28 24 } 29 25 30 - type day = { 31 - repository : string; 32 - date : string; 33 - entries : entry list; 34 - } 26 + type day = { repository : string; date : string; entries : entry list } 35 27 36 - module String_map = Map.Make(String) 28 + module String_map = Map.Make (String) 37 29 38 30 type t = { 39 31 by_repo : day list String_map.t; ··· 78 70 let default_hour = 0 in 79 71 let default_timestamp = Ptime.epoch in 80 72 Jsont.Object.map ~kind:"daily_entry" make 81 - |> Jsont.Object.mem "hour" Jsont.int ~dec_absent:default_hour ~enc:(fun e -> e.hour) 82 - |> Jsont.Object.mem "timestamp" ptime_jsont ~dec_absent:default_timestamp ~enc:(fun e -> e.timestamp) 73 + |> Jsont.Object.mem "hour" Jsont.int ~dec_absent:default_hour ~enc:(fun e -> 74 + e.hour) 75 + |> Jsont.Object.mem "timestamp" ptime_jsont ~dec_absent:default_timestamp 76 + ~enc:(fun e -> e.timestamp) 83 77 |> Jsont.Object.mem "summary" Jsont.string ~enc:(fun e -> e.summary) 84 - |> Jsont.Object.mem "changes" (Jsont.list Jsont.string) ~enc:(fun e -> e.changes) 85 - |> Jsont.Object.mem "commit_range" commit_range_jsont ~enc:(fun e -> e.commit_range) 86 - |> Jsont.Object.mem "contributors" (Jsont.list Jsont.string) ~dec_absent:[] ~enc:(fun e -> e.contributors) 87 - |> Jsont.Object.mem "repo_url" (Jsont.option Jsont.string) ~dec_absent:None ~enc:(fun e -> e.repo_url) 78 + |> Jsont.Object.mem "changes" (Jsont.list Jsont.string) ~enc:(fun e -> 79 + e.changes) 80 + |> Jsont.Object.mem "commit_range" commit_range_jsont ~enc:(fun e -> 81 + e.commit_range) 82 + |> Jsont.Object.mem "contributors" (Jsont.list Jsont.string) ~dec_absent:[] 83 + 
~enc:(fun e -> e.contributors) 84 + |> Jsont.Object.mem "repo_url" (Jsont.option Jsont.string) ~dec_absent:None 85 + ~enc:(fun e -> e.repo_url) 88 86 |> Jsont.Object.finish 89 87 90 - type json_file = { 91 - json_repository : string; 92 - json_entries : file_entry list; 93 - } 88 + type json_file = { json_repository : string; json_entries : file_entry list } 94 89 95 90 let json_file_jsont = 96 91 let make json_repository json_entries = { json_repository; json_entries } in 97 92 Jsont.Object.map ~kind:"daily_changes_file" make 98 - |> Jsont.Object.mem "repository" Jsont.string ~enc:(fun f -> f.json_repository) 99 - |> Jsont.Object.mem "entries" (Jsont.list file_entry_jsont) ~enc:(fun f -> f.json_entries) 93 + |> Jsont.Object.mem "repository" Jsont.string ~enc:(fun f -> 94 + f.json_repository) 95 + |> Jsont.Object.mem "entries" (Jsont.list file_entry_jsont) ~enc:(fun f -> 96 + f.json_entries) 100 97 |> Jsont.Object.finish 101 98 102 99 (* Parse date from filename: <repo>-<YYYY-MM-DD>.json *) 103 100 let parse_daily_filename filename = 104 101 (* Check for pattern: ends with -YYYY-MM-DD.json *) 105 102 let len = String.length filename in 106 - if len < 16 || not (String.ends_with ~suffix:".json" filename) then 107 - None 103 + if len < 16 || not (String.ends_with ~suffix:".json" filename) then None 108 104 else 109 105 (* Try to extract date: last 15 chars are -YYYY-MM-DD.json *) 110 106 let date_start = len - 15 in 111 107 let potential_date = String.sub filename (date_start + 1) 10 in 112 108 (* Validate date format YYYY-MM-DD *) 113 - if String.length potential_date = 10 && 114 - potential_date.[4] = '-' && potential_date.[7] = '-' then 109 + if 110 + String.length potential_date = 10 111 + && potential_date.[4] = '-' 112 + && potential_date.[7] = '-' 113 + then 115 114 let repo = String.sub filename 0 date_start in 116 115 Some (repo, potential_date) 117 - else 118 - None 116 + else None 119 117 120 118 (* Load a single daily file *) 121 119 let load_file ~fs 
~changes_dir ~repo ~date : entry list = ··· 126 124 let content = Eio.Path.load file_path in 127 125 match Jsont_bytesrw.decode_string json_file_jsont content with 128 126 | Ok jf -> 129 - List.map (fun (fe : file_entry) : entry -> 130 - { repository = repo; 131 - hour = fe.hour; 132 - timestamp = fe.timestamp; 133 - summary = fe.summary; 134 - changes = fe.changes; 135 - commit_range = fe.commit_range; 136 - contributors = fe.contributors; 137 - repo_url = fe.repo_url; 138 - }) jf.json_entries 127 + List.map 128 + (fun (fe : file_entry) : entry -> 129 + { 130 + repository = repo; 131 + hour = fe.hour; 132 + timestamp = fe.timestamp; 133 + summary = fe.summary; 134 + changes = fe.changes; 135 + commit_range = fe.commit_range; 136 + contributors = fe.contributors; 137 + repo_url = fe.repo_url; 138 + }) 139 + jf.json_entries 139 140 | Error _ -> []) 140 141 | _ -> [] 141 142 | exception Eio.Io _ -> [] 142 143 143 - let empty = { 144 - by_repo = String_map.empty; 145 - by_date = String_map.empty; 146 - all_entries = []; 147 - } 144 + let empty = 145 + { by_repo = String_map.empty; by_date = String_map.empty; all_entries = [] } 148 146 149 147 let list_repos ~fs ~changes_dir = 150 148 let dir_path = Eio.Path.(fs / Fpath.to_string changes_dir) in ··· 168 166 match parse_daily_filename filename with 169 167 | Some (r, date) when r = repo -> Some date 170 168 | _ -> None) 171 - |> List.sort (fun a b -> String.compare b a) (* descending *) 169 + |> List.sort (fun a b -> String.compare b a) 170 + (* descending *) 172 171 | _ -> [] 173 172 | exception Eio.Io _ -> [] 174 173 ··· 187 186 let parsed_files = List.filter_map parse_daily_filename files in 188 187 189 188 (* Load all files and build days *) 190 - let days : day list = List.filter_map (fun (repo, date) -> 191 - let loaded_entries : entry list = load_file ~fs ~changes_dir ~repo ~date in 192 - if loaded_entries = [] then None 193 - else 194 - let sorted_entries : entry list = List.sort (fun (e1 : entry) (e2 : entry) 
-> 195 - Ptime.compare e1.timestamp e2.timestamp) loaded_entries 196 - in 197 - Some ({ repository = repo; date; entries = sorted_entries } : day) 198 - ) parsed_files in 189 + let days : day list = 190 + List.filter_map 191 + (fun (repo, date) -> 192 + let loaded_entries : entry list = 193 + load_file ~fs ~changes_dir ~repo ~date 194 + in 195 + if loaded_entries = [] then None 196 + else 197 + let sorted_entries : entry list = 198 + List.sort 199 + (fun (e1 : entry) (e2 : entry) -> 200 + Ptime.compare e1.timestamp e2.timestamp) 201 + loaded_entries 202 + in 203 + Some ({ repository = repo; date; entries = sorted_entries } : day)) 204 + parsed_files 205 + in 199 206 200 207 (* Build by_repo map *) 201 - let by_repo : day list String_map.t = List.fold_left (fun acc (d : day) -> 202 - let existing = String_map.find_opt d.repository acc |> Option.value ~default:[] in 203 - String_map.add d.repository (d :: existing) acc 204 - ) String_map.empty days in 208 + let by_repo : day list String_map.t = 209 + List.fold_left 210 + (fun acc (d : day) -> 211 + let existing = 212 + String_map.find_opt d.repository acc |> Option.value ~default:[] 213 + in 214 + String_map.add d.repository (d :: existing) acc) 215 + String_map.empty days 216 + in 205 217 206 218 (* Sort each repo's days by date descending *) 207 - let by_repo : day list String_map.t = String_map.map (fun (ds : day list) -> 208 - List.sort (fun (d1 : day) (d2 : day) -> String.compare d2.date d1.date) ds 209 - ) by_repo in 219 + let by_repo : day list String_map.t = 220 + String_map.map 221 + (fun (ds : day list) -> 222 + List.sort 223 + (fun (d1 : day) (d2 : day) -> String.compare d2.date d1.date) 224 + ds) 225 + by_repo 226 + in 210 227 211 228 (* Build by_date map *) 212 - let by_date : day list String_map.t = List.fold_left (fun acc (d : day) -> 213 - let existing = String_map.find_opt d.date acc |> Option.value ~default:[] in 214 - String_map.add d.date (d :: existing) acc 215 - ) String_map.empty days in 229 + 
let by_date : day list String_map.t = 230 + List.fold_left 231 + (fun acc (d : day) -> 232 + let existing = 233 + String_map.find_opt d.date acc |> Option.value ~default:[] 234 + in 235 + String_map.add d.date (d :: existing) acc) 236 + String_map.empty days 237 + in 216 238 217 239 (* Sort each date's days by repo name *) 218 - let by_date : day list String_map.t = String_map.map (fun (ds : day list) -> 219 - List.sort (fun (d1 : day) (d2 : day) -> String.compare d1.repository d2.repository) ds 220 - ) by_date in 240 + let by_date : day list String_map.t = 241 + String_map.map 242 + (fun (ds : day list) -> 243 + List.sort 244 + (fun (d1 : day) (d2 : day) -> 245 + String.compare d1.repository d2.repository) 246 + ds) 247 + by_date 248 + in 221 249 222 250 (* Collect all entries sorted by timestamp *) 223 251 let all_entries : entry list = 224 252 days 225 253 |> List.concat_map (fun (d : day) -> d.entries) 226 - |> List.sort (fun (e1 : entry) (e2 : entry) -> Ptime.compare e1.timestamp e2.timestamp) 254 + |> List.sort (fun (e1 : entry) (e2 : entry) -> 255 + Ptime.compare e1.timestamp e2.timestamp) 227 256 in 228 257 229 258 { by_repo; by_date; all_entries } 230 - 231 259 | _ -> empty 232 260 | exception Eio.Io _ -> empty 233 261 234 262 let since (t : t) (timestamp : Ptime.t) : entry list = 235 - List.filter (fun (e : entry) -> Ptime.compare e.timestamp timestamp > 0) t.all_entries 263 + List.filter 264 + (fun (e : entry) -> Ptime.compare e.timestamp timestamp > 0) 265 + t.all_entries 236 266 237 267 let for_repo t repo = 238 268 String_map.find_opt repo t.by_repo |> Option.value ~default:[] ··· 240 270 let for_date t date = 241 271 String_map.find_opt date t.by_date |> Option.value ~default:[] 242 272 243 - let repos t = 244 - String_map.bindings t.by_repo |> List.map fst 273 + let repos t = String_map.bindings t.by_repo |> List.map fst 245 274 246 275 let dates t = 247 276 String_map.bindings t.by_date 248 277 |> List.map fst 249 - |> List.sort (fun a b -> 
String.compare b a) (* descending *) 278 + |> List.sort (fun a b -> String.compare b a) 279 + (* descending *) 250 280 251 281 let entries_since ~fs ~changes_dir ~since:timestamp = 252 282 let t = load_all ~fs ~changes_dir in
+14 -24
lib/changes_daily.mli
··· 12 12 13 13 (** {1 Types} *) 14 14 15 - type commit_range = { 16 - from_hash : string; 17 - to_hash : string; 18 - count : int; 19 - } 15 + type commit_range = { from_hash : string; to_hash : string; count : int } 20 16 (** Commit range information. *) 21 17 22 18 type entry = { ··· 43 39 44 40 type t = { 45 41 by_repo : day list String_map.t; 46 - (** Map from repository name to list of days. *) 42 + (** Map from repository name to list of days. *) 47 43 by_date : day list String_map.t; 48 - (** Map from date (YYYY-MM-DD) to list of days across repos. *) 49 - all_entries : entry list; 50 - (** All entries sorted by timestamp ascending. *) 44 + (** Map from date (YYYY-MM-DD) to list of days across repos. *) 45 + all_entries : entry list; (** All entries sorted by timestamp ascending. *) 51 46 } 52 47 (** Immutable collection of all loaded daily changes. *) 53 48 ··· 57 52 (** Empty daily changes structure. *) 58 53 59 54 val load_all : fs:_ Eio.Path.t -> changes_dir:Fpath.t -> t 60 - (** [load_all ~fs ~changes_dir] loads all [<repo>-<YYYY-MM-DD>.json] files 61 - from the changes directory and returns an immutable structure for querying. *) 55 + (** [load_all ~fs ~changes_dir] loads all [<repo>-<YYYY-MM-DD>.json] files from 56 + the changes directory and returns an immutable structure for querying. *) 62 57 63 58 (** {1 Querying} *) 64 59 ··· 67 62 sorted by timestamp ascending. *) 68 63 69 64 val for_repo : t -> string -> day list 70 - (** [for_repo t repo] returns all days for the given repository, 71 - sorted by date descending. *) 65 + (** [for_repo t repo] returns all days for the given repository, sorted by date 66 + descending. *) 72 67 73 68 val for_date : t -> string -> day list 74 69 (** [for_date t date] returns all days (across repos) for the given date. 
*) ··· 82 77 (** {1 File Discovery} *) 83 78 84 79 val list_repos : fs:_ Eio.Path.t -> changes_dir:Fpath.t -> string list 85 - (** [list_repos ~fs ~changes_dir] returns all repository names that have 86 - daily change files. *) 80 + (** [list_repos ~fs ~changes_dir] returns all repository names that have daily 81 + change files. *) 87 82 88 - val list_dates : fs:_ Eio.Path.t -> changes_dir:Fpath.t -> repo:string -> string list 83 + val list_dates : 84 + fs:_ Eio.Path.t -> changes_dir:Fpath.t -> repo:string -> string list 89 85 (** [list_dates ~fs ~changes_dir ~repo] returns all dates for which the given 90 86 repository has change files. *) 91 87 ··· 101 97 repo and date. Returns empty list if file doesn't exist. *) 102 98 103 99 val load_repo_all : 104 - fs:_ Eio.Path.t -> 105 - changes_dir:Fpath.t -> 106 - repo:string -> 107 - entry list 100 + fs:_ Eio.Path.t -> changes_dir:Fpath.t -> repo:string -> entry list 108 101 (** [load_repo_all ~fs ~changes_dir ~repo] loads all entries for a repository 109 102 across all dates. *) 110 103 111 104 val entries_since : 112 - fs:_ Eio.Path.t -> 113 - changes_dir:Fpath.t -> 114 - since:Ptime.t -> 115 - entry list 105 + fs:_ Eio.Path.t -> changes_dir:Fpath.t -> since:Ptime.t -> entry list 116 106 (** [entries_since ~fs ~changes_dir ~since] returns all entries created after 117 107 the given timestamp, useful for real-time updates. *)
+107 -67
lib/changes_query.ml
··· 19 19 let (y, m, d), _ = Ptime.to_date_time now in 20 20 Printf.sprintf "%04d-%02d-%02d" y m d 21 21 in 22 - match Changes_aggregated.load_range ~fs ~changes_dir ~from_date:since_date ~to_date:now_date with 22 + match 23 + Changes_aggregated.load_range ~fs ~changes_dir ~from_date:since_date 24 + ~to_date:now_date 25 + with 23 26 | Error e -> Error e 24 27 | Ok aggregated_files -> 25 28 (* Filter to files generated after 'since' and collect entries *) 26 - let entries = List.concat_map (fun (agg : Changes_aggregated.t) -> 27 - if Ptime.compare agg.generated_at since > 0 then 28 - agg.entries 29 - else 30 - []) aggregated_files 29 + let entries = 30 + List.concat_map 31 + (fun (agg : Changes_aggregated.t) -> 32 + if Ptime.compare agg.generated_at since > 0 then agg.entries else []) 33 + aggregated_files 31 34 in 32 35 Ok entries 33 36 ··· 39 42 let format_repo_link repo url_opt = 40 43 match url_opt with 41 44 | Some url -> Printf.sprintf "[%s](%s)" repo url 42 - | None -> Printf.sprintf "[%s](https://tangled.org/@anil.recoil.org/%s.git)" repo repo 45 + | None -> 46 + Printf.sprintf "[%s](https://tangled.org/@anil.recoil.org/%s.git)" repo 47 + repo 43 48 44 49 let format_for_zulip ~entries ~include_date ~date = 45 - if entries = [] then 46 - "No changes to report." 50 + if entries = [] then "No changes to report." 
47 51 else begin 48 52 let buf = Buffer.create 1024 in 49 53 if include_date then begin ··· 52 56 | None -> Buffer.add_string buf "Recent updates:\n\n" 53 57 end; 54 58 (* Group by change type *) 55 - let by_type = [ 56 - (Changes_aggregated.New_library, "New Libraries", []); 57 - (Changes_aggregated.Feature, "Features", []); 58 - (Changes_aggregated.Bugfix, "Bug Fixes", []); 59 - (Changes_aggregated.Documentation, "Documentation", []); 60 - (Changes_aggregated.Refactor, "Improvements", []); 61 - (Changes_aggregated.Unknown, "Other Changes", []); 62 - ] in 63 - let grouped = List.map (fun (ct, title, _) -> 64 - let matching = List.filter (fun (e : Changes_aggregated.entry) -> e.change_type = ct) entries in 65 - (ct, title, matching)) by_type 59 + let by_type = 60 + [ 61 + (Changes_aggregated.New_library, "New Libraries", []); 62 + (Changes_aggregated.Feature, "Features", []); 63 + (Changes_aggregated.Bugfix, "Bug Fixes", []); 64 + (Changes_aggregated.Documentation, "Documentation", []); 65 + (Changes_aggregated.Refactor, "Improvements", []); 66 + (Changes_aggregated.Unknown, "Other Changes", []); 67 + ] 68 + in 69 + let grouped = 70 + List.map 71 + (fun (ct, title, _) -> 72 + let matching = 73 + List.filter 74 + (fun (e : Changes_aggregated.entry) -> e.change_type = ct) 75 + entries 76 + in 77 + (ct, title, matching)) 78 + by_type 66 79 in 67 - List.iter (fun (_ct, title, entries) -> 68 - if entries <> [] then begin 69 - Buffer.add_string buf (Printf.sprintf "### %s\n\n" title); 70 - List.iter (fun (entry : Changes_aggregated.entry) -> 71 - let repo_link = format_repo_link entry.repository entry.repo_url in 72 - Buffer.add_string buf (Printf.sprintf "**%s**: %s\n" repo_link entry.summary); 73 - List.iter (fun change -> 74 - Buffer.add_string buf (Printf.sprintf "- %s\n" change)) entry.changes; 75 - if entry.contributors <> [] then 76 - Buffer.add_string buf (Printf.sprintf "*Contributors: %s*\n" 77 - (String.concat ", " entry.contributors)); 78 - Buffer.add_string 
buf "\n") entries 79 - end) grouped; 80 + List.iter 81 + (fun (_ct, title, entries) -> 82 + if entries <> [] then begin 83 + Buffer.add_string buf (Printf.sprintf "### %s\n\n" title); 84 + List.iter 85 + (fun (entry : Changes_aggregated.entry) -> 86 + let repo_link = 87 + format_repo_link entry.repository entry.repo_url 88 + in 89 + Buffer.add_string buf 90 + (Printf.sprintf "**%s**: %s\n" repo_link entry.summary); 91 + List.iter 92 + (fun change -> 93 + Buffer.add_string buf (Printf.sprintf "- %s\n" change)) 94 + entry.changes; 95 + if entry.contributors <> [] then 96 + Buffer.add_string buf 97 + (Printf.sprintf "*Contributors: %s*\n" 98 + (String.concat ", " entry.contributors)); 99 + Buffer.add_string buf "\n") 100 + entries 101 + end) 102 + grouped; 80 103 Buffer.contents buf 81 104 end 82 105 83 106 let format_summary ~entries = 84 - if entries = [] then 85 - "No new changes." 107 + if entries = [] then "No new changes." 86 108 else 87 109 let count = List.length entries in 88 - let repos = List.sort_uniq String.compare 89 - (List.map (fun (e : Changes_aggregated.entry) -> e.repository) entries) in 90 - Printf.sprintf "%d change%s across %d repositor%s: %s" 91 - count (if count = 1 then "" else "s") 92 - (List.length repos) (if List.length repos = 1 then "y" else "ies") 110 + let repos = 111 + List.sort_uniq String.compare 112 + (List.map (fun (e : Changes_aggregated.entry) -> e.repository) entries) 113 + in 114 + Printf.sprintf "%d change%s across %d repositor%s: %s" count 115 + (if count = 1 then "" else "s") 116 + (List.length repos) 117 + (if List.length repos = 1 then "y" else "ies") 93 118 (String.concat ", " repos) 94 119 95 120 (** {1 Daily Changes (Real-time)} *) ··· 101 126 daily_changes_since ~fs ~changes_dir ~since <> [] 102 127 103 128 let format_daily_for_zulip ~entries ~include_date ~date = 104 - if entries = [] then 105 - "No changes to report." 129 + if entries = [] then "No changes to report." 
106 130 else begin 107 131 let buf = Buffer.create 1024 in 108 132 if include_date then begin 109 133 match date with 110 - | Some d -> Buffer.add_string buf (Printf.sprintf "## Changes for %s\n\n" d) 134 + | Some d -> 135 + Buffer.add_string buf (Printf.sprintf "## Changes for %s\n\n" d) 111 136 | None -> Buffer.add_string buf "## Recent Changes\n\n" 112 137 end; 113 138 (* Group by repository *) 114 - let repos = List.sort_uniq String.compare 115 - (List.map (fun (e : Changes_daily.entry) -> e.repository) entries) in 116 - List.iter (fun repo -> 117 - let repo_entries = List.filter (fun (e : Changes_daily.entry) -> e.repository = repo) entries in 118 - if repo_entries <> [] then begin 119 - let first_entry = List.hd repo_entries in 120 - let repo_link = format_repo_link repo first_entry.repo_url in 121 - Buffer.add_string buf (Printf.sprintf "### %s\n\n" repo_link); 122 - List.iter (fun (entry : Changes_daily.entry) -> 123 - Buffer.add_string buf (Printf.sprintf "**%s**\n" entry.summary); 124 - List.iter (fun change -> 125 - Buffer.add_string buf (Printf.sprintf "- %s\n" change)) entry.changes; 126 - if entry.contributors <> [] then 127 - Buffer.add_string buf (Printf.sprintf "*Contributors: %s*\n" 128 - (String.concat ", " entry.contributors)); 129 - Buffer.add_string buf "\n") repo_entries 130 - end) repos; 139 + let repos = 140 + List.sort_uniq String.compare 141 + (List.map (fun (e : Changes_daily.entry) -> e.repository) entries) 142 + in 143 + List.iter 144 + (fun repo -> 145 + let repo_entries = 146 + List.filter 147 + (fun (e : Changes_daily.entry) -> e.repository = repo) 148 + entries 149 + in 150 + if repo_entries <> [] then begin 151 + let first_entry = List.hd repo_entries in 152 + let repo_link = format_repo_link repo first_entry.repo_url in 153 + Buffer.add_string buf (Printf.sprintf "### %s\n\n" repo_link); 154 + List.iter 155 + (fun (entry : Changes_daily.entry) -> 156 + Buffer.add_string buf (Printf.sprintf "**%s**\n" entry.summary); 157 + 
List.iter 158 + (fun change -> 159 + Buffer.add_string buf (Printf.sprintf "- %s\n" change)) 160 + entry.changes; 161 + if entry.contributors <> [] then 162 + Buffer.add_string buf 163 + (Printf.sprintf "*Contributors: %s*\n" 164 + (String.concat ", " entry.contributors)); 165 + Buffer.add_string buf "\n") 166 + repo_entries 167 + end) 168 + repos; 131 169 Buffer.contents buf 132 170 end 133 171 134 172 let format_daily_summary ~entries = 135 - if entries = [] then 136 - "No new changes." 173 + if entries = [] then "No new changes." 137 174 else 138 175 let count = List.length entries in 139 - let repos = List.sort_uniq String.compare 140 - (List.map (fun (e : Changes_daily.entry) -> e.repository) entries) in 141 - Printf.sprintf "%d change%s across %d repositor%s: %s" 142 - count (if count = 1 then "" else "s") 143 - (List.length repos) (if List.length repos = 1 then "y" else "ies") 176 + let repos = 177 + List.sort_uniq String.compare 178 + (List.map (fun (e : Changes_daily.entry) -> e.repository) entries) 179 + in 180 + Printf.sprintf "%d change%s across %d repositor%s: %s" count 181 + (if count = 1 then "" else "s") 182 + (List.length repos) 183 + (if List.length repos = 1 then "y" else "ies") 144 184 (String.concat ", " repos)
+12 -24
lib/changes_query.mli
··· 16 16 since:Ptime.t -> 17 17 now:Ptime.t -> 18 18 (Changes_aggregated.entry list, string) result 19 - (** Get all change entries from aggregated files created after [since]. 20 - Returns entries from all days after the timestamp. 19 + (** Get all change entries from aggregated files created after [since]. Returns 20 + entries from all days after the timestamp. 21 21 @param now Current time for determining the date range end. *) 22 22 23 23 val has_new_changes : 24 - fs:_ Eio.Path.t -> 25 - changes_dir:Fpath.t -> 26 - since:Ptime.t -> 27 - now:Ptime.t -> 28 - bool 24 + fs:_ Eio.Path.t -> changes_dir:Fpath.t -> since:Ptime.t -> now:Ptime.t -> bool 29 25 (** Check if there are any new changes since the given timestamp. 30 26 @param now Current time for determining the date range end. *) 31 27 ··· 36 32 include_date:bool -> 37 33 date:string option -> 38 34 string 39 - (** Format entries as markdown suitable for Zulip. 40 - If [include_date] is true, includes a date header. 41 - [date] is used for the header if provided. *) 35 + (** Format entries as markdown suitable for Zulip. If [include_date] is true, 36 + includes a date header. [date] is used for the header if provided. *) 42 37 43 - val format_summary : 44 - entries:Changes_aggregated.entry list -> 45 - string 38 + val format_summary : entries:Changes_aggregated.entry list -> string 46 39 (** Format a brief summary of the changes. *) 47 40 48 41 (** {1 Daily Changes (Real-time)} *) ··· 52 45 changes_dir:Fpath.t -> 53 46 since:Ptime.t -> 54 47 Changes_daily.entry list 55 - (** Get all daily change entries created after [since] timestamp. 56 - Uses the per-day-per-repo files for real-time access. *) 48 + (** Get all daily change entries created after [since] timestamp. Uses the 49 + per-day-per-repo files for real-time access. 
*) 57 50 58 51 val has_new_daily_changes : 59 - fs:_ Eio.Path.t -> 60 - changes_dir:Fpath.t -> 61 - since:Ptime.t -> 62 - bool 52 + fs:_ Eio.Path.t -> changes_dir:Fpath.t -> since:Ptime.t -> bool 63 53 (** Check if there are any new daily changes since the given timestamp. *) 64 54 65 55 val format_daily_for_zulip : ··· 67 57 include_date:bool -> 68 58 date:string option -> 69 59 string 70 - (** Format daily entries as markdown suitable for Zulip. 71 - Groups entries by repository. *) 60 + (** Format daily entries as markdown suitable for Zulip. Groups entries by 61 + repository. *) 72 62 73 - val format_daily_summary : 74 - entries:Changes_daily.entry list -> 75 - string 63 + val format_daily_summary : entries:Changes_daily.entry list -> string 76 64 (** Format a brief summary of daily changes. *)
+63 -41
lib/cross_status.ml
··· 1 1 (** Cross-user repository comparison for monopam. 2 2 3 - Compares subtrees across multiple verse users' monorepos to identify 4 - common repositories and their relative commit states. *) 3 + Compares subtrees across multiple verse users' monorepos to identify common 4 + repositories and their relative commit states. *) 5 5 6 6 (** Relationship between two subtree commits. *) 7 7 type relationship = ··· 12 12 (** Commits have diverged from a common ancestor *) 13 13 | Unknown (** Cannot determine relationship (missing commits, etc.) *) 14 14 15 - (** Information about a subtree in a monorepo. *) 16 15 type subtree_info = { 17 16 monorepo_path : Fpath.t; (** Path to the monorepo *) 18 17 prefix : string; (** Subtree directory name *) 19 18 upstream_commit : string option; (** Last synced upstream commit SHA *) 20 19 } 20 + (** Information about a subtree in a monorepo. *) 21 21 22 - (** Comparison of a repo across multiple users. *) 23 22 type repo_comparison = { 24 23 repo_name : string; (** Repository/subtree name *) 25 - my_info : subtree_info option; (** My subtree info (None if not in my mono) *) 24 + my_info : subtree_info option; 25 + (** My subtree info (None if not in my mono) *) 26 26 others : (string * subtree_info * relationship) list; 27 27 (** List of (handle, info, relationship to me) *) 28 28 } 29 + (** Comparison of a repo across multiple users. *) 29 30 30 - (** Summary of all cross-user comparisons. *) 31 31 type t = { 32 32 my_repos : repo_comparison list; (** Repos I have, compared against others *) 33 33 other_repos : (string * string list) list; 34 34 (** Repos I don't have: (repo_name, list of handles who have it) *) 35 35 } 36 + (** Summary of all cross-user comparisons. 
*) 36 37 37 38 let pp_relationship ppf = function 38 39 | Same -> Fmt.string ppf "same" 39 - | I_am_ahead n -> Fmt.pf ppf "%d behind" n (* They are behind me *) 40 - | I_am_behind n -> Fmt.pf ppf "%d ahead" n (* They are ahead of me *) 40 + | I_am_ahead n -> Fmt.pf ppf "%d behind" n (* They are behind me *) 41 + | I_am_behind n -> Fmt.pf ppf "%d ahead" n (* They are ahead of me *) 41 42 | Diverged { my_ahead; their_ahead } -> 42 43 Fmt.pf ppf "diverged: them +%d, me +%d" their_ahead my_ahead 43 44 | Unknown -> Fmt.string ppf "unknown" 44 45 45 46 let pp_subtree_info ppf info = 46 47 match info.upstream_commit with 47 - | Some commit -> Fmt.pf ppf "%s" (String.sub commit 0 (min 7 (String.length commit))) 48 + | Some commit -> 49 + Fmt.pf ppf "%s" (String.sub commit 0 (min 7 (String.length commit))) 48 50 | None -> Fmt.string ppf "(no commit)" 49 51 50 52 let pp_repo_comparison ppf comp = ··· 54 56 | None -> ()); 55 57 List.iter 56 58 (fun (handle, info, rel) -> 57 - Fmt.pf ppf "%-19s %a (%a)@," handle pp_subtree_info info pp_relationship rel) 59 + Fmt.pf ppf "%-19s %a (%a)@," handle pp_subtree_info info pp_relationship 60 + rel) 58 61 comp.others; 59 62 Fmt.pf ppf "@]" 60 63 ··· 62 65 let pp ppf t = 63 66 if t.my_repos <> [] then begin 64 67 Fmt.pf ppf "@[<v>Cross-user comparison:@,"; 65 - List.iter (fun comp -> Fmt.pf ppf " %a@," pp_repo_comparison comp) t.my_repos; 68 + List.iter 69 + (fun comp -> Fmt.pf ppf " %a@," pp_repo_comparison comp) 70 + t.my_repos; 66 71 Fmt.pf ppf "@]" 67 72 end; 68 73 if t.other_repos <> [] then begin ··· 97 102 let with_actions = ref [] in 98 103 let in_sync = ref [] in 99 104 100 - List.iter (fun comp -> 101 - let actionable = 102 - List.filter (fun (_, _, rel) -> is_actionable rel) comp.others 103 - in 104 - if actionable <> [] then 105 - with_actions := (comp, actionable) :: !with_actions 106 - else 107 - in_sync := comp :: !in_sync) 105 + List.iter 106 + (fun comp -> 107 + let actionable = 108 + List.filter (fun (_, _, rel) -> 
is_actionable rel) comp.others 109 + in 110 + if actionable <> [] then 111 + with_actions := (comp, actionable) :: !with_actions 112 + else in_sync := comp :: !in_sync) 108 113 t.my_repos; 109 114 110 115 (* Print repos with actions needed first *) 111 116 if !with_actions <> [] then begin 112 117 Fmt.pf ppf "@[<v>@,Subtrees with upstream changes:@,"; 113 - List.iter (fun (comp, actionable) -> 114 - let changes = List.map (fun (h, _, rel) -> 115 - Fmt.str "%s:%a" h pp_rel_short rel) actionable 116 - in 117 - Fmt.pf ppf " %-24s %s@," comp.repo_name (String.concat " " changes)) 118 + List.iter 119 + (fun (comp, actionable) -> 120 + let changes = 121 + List.map 122 + (fun (h, _, rel) -> Fmt.str "%s:%a" h pp_rel_short rel) 123 + actionable 124 + in 125 + Fmt.pf ppf " %-24s %s@," comp.repo_name (String.concat " " changes)) 118 126 (List.rev !with_actions); 119 127 Fmt.pf ppf "@]" 120 128 end; ··· 137 145 in 138 146 { monorepo_path; prefix; upstream_commit } 139 147 140 - (** Compare two subtree commits using a reference checkout. 141 - If checkout is available, use it as the authoritative source. 142 - Otherwise, just check if commits match. *) 148 + (** Compare two subtree commits using a reference checkout. If checkout is 149 + available, use it as the authoritative source. Otherwise, just check if 150 + commits match. 
*) 143 151 let compare_commits ~proc ~fs ~checkout_path ~my_commit ~their_commit () = 144 152 match (my_commit, their_commit) with 145 153 | None, _ | _, None -> Unknown ··· 150 158 else begin 151 159 (* Check if either is ancestor of the other *) 152 160 let my_is_ancestor = 153 - Git.is_ancestor ~proc ~fs ~repo:checkout_path ~commit1:my ~commit2:their () 161 + Git.is_ancestor ~proc ~fs ~repo:checkout_path ~commit1:my 162 + ~commit2:their () 154 163 in 155 164 let their_is_ancestor = 156 - Git.is_ancestor ~proc ~fs ~repo:checkout_path ~commit1:their ~commit2:my () 165 + Git.is_ancestor ~proc ~fs ~repo:checkout_path ~commit1:their 166 + ~commit2:my () 157 167 in 158 168 match (my_is_ancestor, their_is_ancestor) with 159 169 | true, false -> 160 170 (* My commit is ancestor of theirs -> I'm behind *) 161 171 let behind = 162 - Git.count_commits_between ~proc ~fs ~repo:checkout_path ~base:my ~head:their () 172 + Git.count_commits_between ~proc ~fs ~repo:checkout_path ~base:my 173 + ~head:their () 163 174 in 164 175 I_am_behind behind 165 176 | false, true -> 166 177 (* Their commit is ancestor of mine -> I'm ahead *) 167 178 let ahead = 168 - Git.count_commits_between ~proc ~fs ~repo:checkout_path ~base:their ~head:my () 179 + Git.count_commits_between ~proc ~fs ~repo:checkout_path 180 + ~base:their ~head:my () 169 181 in 170 182 I_am_ahead ahead 171 183 | true, true -> 172 184 (* Both are ancestors of each other -> same commit *) 173 185 Same 174 - | false, false -> 186 + | false, false -> ( 175 187 (* Neither is ancestor -> diverged *) 176 - (match Git.merge_base ~proc ~fs ~repo:checkout_path ~commit1:my ~commit2:their () with 188 + match 189 + Git.merge_base ~proc ~fs ~repo:checkout_path ~commit1:my 190 + ~commit2:their () 191 + with 177 192 | Error _ -> Unknown 178 193 | Ok base -> 179 194 let my_ahead = 180 - Git.count_commits_between ~proc ~fs ~repo:checkout_path ~base ~head:my () 195 + Git.count_commits_between ~proc ~fs ~repo:checkout_path ~base 196 + 
~head:my () 181 197 in 182 198 let their_ahead = 183 - Git.count_commits_between ~proc ~fs ~repo:checkout_path ~base ~head:their () 199 + Git.count_commits_between ~proc ~fs ~repo:checkout_path ~base 200 + ~head:their () 184 201 in 185 202 Diverged { my_ahead; their_ahead }) 186 203 end 187 204 188 - (** Compute cross-user status comparing my monorepo against all verse members. *) 205 + (** Compute cross-user status comparing my monorepo against all verse members. 206 + *) 189 207 let compute ~proc ~fs ~verse_config ~monopam_config () = 190 208 let my_mono = Verse_config.mono_path verse_config in 191 209 let checkouts = Config.Paths.checkouts monopam_config in ··· 194 212 let my_subtrees = Verse.scan_subtrees ~proc ~fs my_mono in 195 213 196 214 (* Get verse subtrees (map: repo_name -> [(handle, monorepo_path)]) *) 197 - let verse_subtrees = Verse.get_verse_subtrees ~proc ~fs ~config:verse_config () in 215 + let verse_subtrees = 216 + Verse.get_verse_subtrees ~proc ~fs ~config:verse_config () 217 + in 198 218 199 219 (* Build comparisons for repos I have *) 200 220 let my_repos = 201 221 List.filter_map 202 222 (fun repo_name -> 203 - let my_info = get_subtree_info ~proc ~fs ~monorepo_path:my_mono ~prefix:repo_name () in 223 + let my_info = 224 + get_subtree_info ~proc ~fs ~monorepo_path:my_mono ~prefix:repo_name () 225 + in 204 226 let checkout_path = Fpath.(checkouts / repo_name) in 205 227 206 228 (* Find others who have this repo *) ··· 208 230 try Hashtbl.find verse_subtrees repo_name with Not_found -> [] 209 231 in 210 232 211 - if others_with_repo = [] then 212 - None (* No one else has this repo, skip *) 233 + if others_with_repo = [] then None (* No one else has this repo, skip *) 213 234 else begin 214 235 let others = 215 236 List.map 216 237 (fun (handle, their_mono) -> 217 238 let their_info = 218 - get_subtree_info ~proc ~fs ~monorepo_path:their_mono ~prefix:repo_name () 239 + get_subtree_info ~proc ~fs ~monorepo_path:their_mono 240 + 
~prefix:repo_name () 219 241 in 220 242 let rel = 221 243 compare_commits ~proc ~fs ~checkout_path
+11 -10
lib/cross_status.mli
··· 1 1 (** Cross-user repository comparison for monopam. 2 2 3 - Compares subtrees across multiple verse users' monorepos to identify 4 - common repositories and their relative commit states. *) 3 + Compares subtrees across multiple verse users' monorepos to identify common 4 + repositories and their relative commit states. *) 5 5 6 6 (** {1 Types} *) 7 7 ··· 14 14 (** Commits have diverged from a common ancestor *) 15 15 | Unknown (** Cannot determine relationship (missing commits, etc.) *) 16 16 17 - (** Information about a subtree in a monorepo. *) 18 17 type subtree_info = { 19 18 monorepo_path : Fpath.t; (** Path to the monorepo *) 20 19 prefix : string; (** Subtree directory name *) 21 20 upstream_commit : string option; (** Last synced upstream commit SHA *) 22 21 } 22 + (** Information about a subtree in a monorepo. *) 23 23 24 - (** Comparison of a repo across multiple users. *) 25 24 type repo_comparison = { 26 25 repo_name : string; (** Repository/subtree name *) 27 - my_info : subtree_info option; (** My subtree info (None if not in my mono) *) 26 + my_info : subtree_info option; 27 + (** My subtree info (None if not in my mono) *) 28 28 others : (string * subtree_info * relationship) list; 29 29 (** List of (handle, info, relationship to me) *) 30 30 } 31 + (** Comparison of a repo across multiple users. *) 31 32 32 - (** Summary of all cross-user comparisons. *) 33 33 type t = { 34 34 my_repos : repo_comparison list; (** Repos I have, compared against others *) 35 35 other_repos : (string * string list) list; 36 36 (** Repos I don't have: (repo_name, list of handles who have it) *) 37 37 } 38 + (** Summary of all cross-user comparisons. *) 38 39 39 40 (** {1 Pretty Printing} *) 40 41 ··· 51 52 (** [pp] formats the full cross-user status with commit SHAs. *) 52 53 53 54 val pp_summary : t Fmt.t 54 - (** [pp_summary] formats a succinct summary with emphasis on repos where 55 - others have commits not in mine. 
*) 55 + (** [pp_summary] formats a succinct summary with emphasis on repos where others 56 + have commits not in mine. *) 56 57 57 58 val is_actionable : relationship -> bool 58 - (** [is_actionable rel] returns [true] if the relationship indicates 59 - that others have commits I should consider pulling (I_am_behind or Diverged). *) 59 + (** [is_actionable rel] returns [true] if the relationship indicates that others 60 + have commits I should consider pulling (I_am_behind or Diverged). *) 60 61 61 62 (** {1 Computation} *) 62 63
+562 -305
lib/doctor.ml
··· 1 1 (** Doctor command - Claude-powered workspace health analysis. 2 2 3 - Analyzes workspace state, verse member commits, and provides 4 - actionable recommendations for maintaining your monorepo. *) 3 + Analyzes workspace state, verse member commits, and provides actionable 4 + recommendations for maintaining your monorepo. *) 5 5 6 6 let src = Logs.Src.create "monopam.doctor" ~doc:"Doctor analysis" 7 + 7 8 module Log = (val Logs.src_log src : Logs.LOG) 8 9 9 10 (** {1 Types} *) ··· 19 20 | Other 20 21 21 22 (** Priority level for a change *) 22 - type priority = 23 - | Critical 24 - | High 25 - | Medium 26 - | Low 23 + type priority = Critical | High | Medium | Low 27 24 28 25 (** Recommended action for a commit *) 29 - type recommendation = 30 - | Merge_now 31 - | Review_first 32 - | Skip 33 - | Needs_discussion 26 + type recommendation = Merge_now | Review_first | Skip | Needs_discussion 34 27 35 28 (** Risk of conflicts when merging *) 36 - type conflict_risk = 37 - | None_risk 38 - | Low_risk 39 - | Medium_risk 40 - | High_risk 29 + type conflict_risk = None_risk | Low_risk | Medium_risk | High_risk 41 30 42 - (** Analysis of a single commit from a verse member *) 43 31 type commit_analysis = { 44 32 hash : string; 45 33 subject : string; ··· 51 39 conflict_risk : conflict_risk; 52 40 commit_summary : string; 53 41 } 42 + (** Analysis of a single commit from a verse member *) 54 43 55 - (** Analysis of commits from a specific verse member for a repo *) 56 44 type verse_analysis = { 57 45 handle : string; 58 46 commits : commit_analysis list; 59 47 suggested_action : string option; 60 48 } 49 + (** Analysis of commits from a specific verse member for a repo *) 61 50 62 - (** Sync status for a single repository *) 63 51 type repo_sync = { 64 52 name : string; 65 53 local_sync : [ `In_sync | `Ahead of int | `Behind of int | `Needs_sync ]; ··· 67 55 remote_behind : int; 68 56 verse_analyses : verse_analysis list; 69 57 } 58 + (** Sync status for a single 
repository *) 70 59 71 - (** Summary statistics *) 72 60 type report_summary = { 73 61 repos_total : int; 74 62 repos_need_sync : int; 75 63 repos_behind_upstream : int; 76 64 verse_divergences : int; 77 65 } 66 + (** Summary statistics *) 78 67 79 - (** Actionable recommendation *) 80 68 type action = { 81 69 action_priority : priority; 82 70 description : string; 83 71 command : string option; 84 72 } 73 + (** Actionable recommendation *) 85 74 86 - (** Full doctor report *) 87 75 type report = { 88 76 timestamp : string; 89 77 workspace : string; ··· 92 80 recommendations : action list; 93 81 warnings : string list; 94 82 } 83 + (** Full doctor report *) 95 84 96 85 (** {1 JSON Encoding} *) 97 86 ··· 151 140 | _ -> Low_risk 152 141 153 142 let commit_analysis_jsont = 154 - let make hash subject author date category priority recommendation conflict_risk commit_summary = 155 - { hash; subject; author; date; 143 + let make hash subject author date category priority recommendation 144 + conflict_risk commit_summary = 145 + { 146 + hash; 147 + subject; 148 + author; 149 + date; 156 150 category = change_category_of_string category; 157 151 priority = priority_of_string priority; 158 152 recommendation = recommendation_of_string recommendation; 159 153 conflict_risk = conflict_risk_of_string conflict_risk; 160 - commit_summary } 154 + commit_summary; 155 + } 161 156 in 162 157 Jsont.Object.map ~kind:"commit_analysis" make 163 158 |> Jsont.Object.mem "hash" Jsont.string ~enc:(fun c -> c.hash) 164 159 |> Jsont.Object.mem "subject" Jsont.string ~enc:(fun c -> c.subject) 165 160 |> Jsont.Object.mem "author" Jsont.string ~enc:(fun c -> c.author) 166 161 |> Jsont.Object.mem "date" Jsont.string ~enc:(fun c -> c.date) 167 - |> Jsont.Object.mem "category" Jsont.string ~enc:(fun c -> change_category_to_string c.category) 168 - |> Jsont.Object.mem "priority" Jsont.string ~enc:(fun c -> priority_to_string c.priority) 169 - |> Jsont.Object.mem "recommendation" Jsont.string 
~enc:(fun c -> recommendation_to_string c.recommendation) 170 - |> Jsont.Object.mem "conflict_risk" Jsont.string ~enc:(fun c -> conflict_risk_to_string c.conflict_risk) 162 + |> Jsont.Object.mem "category" Jsont.string ~enc:(fun c -> 163 + change_category_to_string c.category) 164 + |> Jsont.Object.mem "priority" Jsont.string ~enc:(fun c -> 165 + priority_to_string c.priority) 166 + |> Jsont.Object.mem "recommendation" Jsont.string ~enc:(fun c -> 167 + recommendation_to_string c.recommendation) 168 + |> Jsont.Object.mem "conflict_risk" Jsont.string ~enc:(fun c -> 169 + conflict_risk_to_string c.conflict_risk) 171 170 |> Jsont.Object.mem "summary" Jsont.string ~enc:(fun c -> c.commit_summary) 172 171 |> Jsont.Object.finish 173 172 174 173 let verse_analysis_jsont = 175 - let make handle commits suggested_action = { handle; commits; suggested_action } in 174 + let make handle commits suggested_action = 175 + { handle; commits; suggested_action } 176 + in 176 177 Jsont.Object.map ~kind:"verse_analysis" make 177 178 |> Jsont.Object.mem "handle" Jsont.string ~enc:(fun v -> v.handle) 178 - |> Jsont.Object.mem "commits" (Jsont.list commit_analysis_jsont) ~enc:(fun v -> v.commits) 179 - |> Jsont.Object.mem "suggested_action" (Jsont.option Jsont.string) ~dec_absent:None ~enc:(fun v -> v.suggested_action) 179 + |> Jsont.Object.mem "commits" (Jsont.list commit_analysis_jsont) 180 + ~enc:(fun v -> v.commits) 181 + |> Jsont.Object.mem "suggested_action" (Jsont.option Jsont.string) 182 + ~dec_absent:None ~enc:(fun v -> v.suggested_action) 180 183 |> Jsont.Object.finish 181 184 182 185 let local_sync_to_string = function ··· 196 199 197 200 let repo_sync_jsont = 198 201 let make name local_sync remote_ahead remote_behind verse_analyses = 199 - { name; local_sync = local_sync_of_string local_sync; remote_ahead; remote_behind; verse_analyses } 202 + { 203 + name; 204 + local_sync = local_sync_of_string local_sync; 205 + remote_ahead; 206 + remote_behind; 207 + verse_analyses; 208 + 
} 200 209 in 201 210 Jsont.Object.map ~kind:"repo_sync" make 202 211 |> Jsont.Object.mem "name" Jsont.string ~enc:(fun r -> r.name) 203 - |> Jsont.Object.mem "local_sync" Jsont.string ~enc:(fun r -> local_sync_to_string r.local_sync) 212 + |> Jsont.Object.mem "local_sync" Jsont.string ~enc:(fun r -> 213 + local_sync_to_string r.local_sync) 204 214 |> Jsont.Object.mem "remote_ahead" Jsont.int ~enc:(fun r -> r.remote_ahead) 205 215 |> Jsont.Object.mem "remote_behind" Jsont.int ~enc:(fun r -> r.remote_behind) 206 - |> Jsont.Object.mem "verse_analyses" (Jsont.list verse_analysis_jsont) ~enc:(fun r -> r.verse_analyses) 216 + |> Jsont.Object.mem "verse_analyses" (Jsont.list verse_analysis_jsont) 217 + ~enc:(fun r -> r.verse_analyses) 207 218 |> Jsont.Object.finish 208 219 209 220 let report_summary_jsont = 210 - let make repos_total repos_need_sync repos_behind_upstream verse_divergences : report_summary = 221 + let make repos_total repos_need_sync repos_behind_upstream verse_divergences : 222 + report_summary = 211 223 { repos_total; repos_need_sync; repos_behind_upstream; verse_divergences } 212 224 in 213 225 Jsont.Object.map ~kind:"report_summary" make 214 226 |> Jsont.Object.mem "repos_total" Jsont.int ~enc:(fun s -> s.repos_total) 215 - |> Jsont.Object.mem "repos_need_sync" Jsont.int ~enc:(fun s -> s.repos_need_sync) 216 - |> Jsont.Object.mem "repos_behind_upstream" Jsont.int ~enc:(fun s -> s.repos_behind_upstream) 217 - |> Jsont.Object.mem "verse_divergences" Jsont.int ~enc:(fun s -> s.verse_divergences) 227 + |> Jsont.Object.mem "repos_need_sync" Jsont.int ~enc:(fun s -> 228 + s.repos_need_sync) 229 + |> Jsont.Object.mem "repos_behind_upstream" Jsont.int ~enc:(fun s -> 230 + s.repos_behind_upstream) 231 + |> Jsont.Object.mem "verse_divergences" Jsont.int ~enc:(fun s -> 232 + s.verse_divergences) 218 233 |> Jsont.Object.finish 219 234 220 235 let action_jsont = ··· 222 237 { action_priority = priority_of_string priority; description; command } 223 238 in 224 239 
Jsont.Object.map ~kind:"action" make 225 - |> Jsont.Object.mem "priority" Jsont.string ~enc:(fun a -> priority_to_string a.action_priority) 240 + |> Jsont.Object.mem "priority" Jsont.string ~enc:(fun a -> 241 + priority_to_string a.action_priority) 226 242 |> Jsont.Object.mem "action" Jsont.string ~enc:(fun a -> a.description) 227 - |> Jsont.Object.mem "command" (Jsont.option Jsont.string) ~dec_absent:None ~enc:(fun a -> a.command) 243 + |> Jsont.Object.mem "command" (Jsont.option Jsont.string) ~dec_absent:None 244 + ~enc:(fun a -> a.command) 228 245 |> Jsont.Object.finish 229 246 230 247 let report_jsont = ··· 234 251 Jsont.Object.map ~kind:"report" make 235 252 |> Jsont.Object.mem "timestamp" Jsont.string ~enc:(fun r -> r.timestamp) 236 253 |> Jsont.Object.mem "workspace" Jsont.string ~enc:(fun r -> r.workspace) 237 - |> Jsont.Object.mem "summary" report_summary_jsont ~enc:(fun r -> r.report_summary) 238 - |> Jsont.Object.mem "repos" (Jsont.list repo_sync_jsont) ~enc:(fun r -> r.repos) 239 - |> Jsont.Object.mem "recommendations" (Jsont.list action_jsont) ~enc:(fun r -> r.recommendations) 240 - |> Jsont.Object.mem "warnings" (Jsont.list Jsont.string) ~enc:(fun r -> r.warnings) 254 + |> Jsont.Object.mem "summary" report_summary_jsont ~enc:(fun r -> 255 + r.report_summary) 256 + |> Jsont.Object.mem "repos" (Jsont.list repo_sync_jsont) ~enc:(fun r -> 257 + r.repos) 258 + |> Jsont.Object.mem "recommendations" (Jsont.list action_jsont) ~enc:(fun r -> 259 + r.recommendations) 260 + |> Jsont.Object.mem "warnings" (Jsont.list Jsont.string) ~enc:(fun r -> 261 + r.warnings) 241 262 |> Jsont.Object.finish 242 263 243 264 (** {1 Text Rendering} *) ··· 271 292 272 293 let pp_commit_analysis ppf c = 273 294 Fmt.pf ppf " [%a] %s %s@." pp_priority c.priority c.hash c.subject; 274 - Fmt.pf ppf " Category: %a | Risk: %a | Action: %a@." 
275 - pp_category c.category 276 - pp_conflict_risk c.conflict_risk 277 - pp_recommendation c.recommendation; 278 - if c.commit_summary <> "" then 279 - Fmt.pf ppf " -> %s@." c.commit_summary 295 + Fmt.pf ppf " Category: %a | Risk: %a | Action: %a@." pp_category 296 + c.category pp_conflict_risk c.conflict_risk pp_recommendation 297 + c.recommendation; 298 + if c.commit_summary <> "" then Fmt.pf ppf " -> %s@." c.commit_summary 280 299 281 300 let pp_verse_analysis ppf v = 282 - Fmt.pf ppf "@. Their commits from %s (%d):@.@." v.handle (List.length v.commits); 301 + Fmt.pf ppf "@. Their commits from %s (%d):@.@." v.handle 302 + (List.length v.commits); 283 303 List.iter (pp_commit_analysis ppf) v.commits; 284 304 match v.suggested_action with 285 305 | Some cmd -> Fmt.pf ppf "@. Suggested: %s@." cmd 286 306 | None -> () 287 307 288 308 let pp_repo_sync ppf r = 289 - let local_str = match r.local_sync with 309 + let local_str = 310 + match r.local_sync with 290 311 | `In_sync -> "=" 291 312 | `Ahead n -> Printf.sprintf "+%d" n 292 313 | `Behind n -> Printf.sprintf "-%d" n 293 314 | `Needs_sync -> "sync" 294 315 in 295 316 Fmt.pf ppf "@.%a (local:%s, remote:+%d/-%d)@." 296 - Fmt.(styled `Bold string) r.name local_str r.remote_ahead r.remote_behind; 317 + Fmt.(styled `Bold string) 318 + r.name local_str r.remote_ahead r.remote_behind; 297 319 if r.verse_analyses <> [] then 298 320 List.iter (pp_verse_analysis ppf) r.verse_analyses 299 321 300 322 let pp_action ppf a = 301 323 Fmt.pf ppf " [%a] %s@." pp_priority a.action_priority a.description; 302 - match a.command with 303 - | Some cmd -> Fmt.pf ppf " $ %s@." cmd 304 - | None -> () 324 + match a.command with Some cmd -> Fmt.pf ppf " $ %s@." cmd | None -> () 305 325 306 326 let pp_report ppf r = 307 327 Fmt.pf ppf "@.=== Monopam Doctor Report ===@."; ··· 313 333 Fmt.pf ppf " %d verse divergences@." 
r.report_summary.verse_divergences; 314 334 315 335 (* Only show repos with issues *) 316 - let repos_with_issues = List.filter (fun r -> 317 - r.local_sync <> `In_sync || 318 - r.remote_behind > 0 || 319 - r.verse_analyses <> []) 320 - r.repos 336 + let repos_with_issues = 337 + List.filter 338 + (fun r -> 339 + r.local_sync <> `In_sync || r.remote_behind > 0 340 + || r.verse_analyses <> []) 341 + r.repos 321 342 in 322 343 if repos_with_issues <> [] then begin 323 344 Fmt.pf ppf "@.---@."; ··· 337 358 338 359 (** {1 Claude Analysis} *) 339 360 340 - (** Information about a single remote's status *) 341 361 type remote_status = { 342 362 remote_name : string; 343 363 url : string; 344 364 ahead : int; [@warning "-69"] (** Commits we have that remote doesn't *) 345 365 behind : int; (** Commits remote has that we don't *) 346 - incoming_commits : Git.log_entry list; (** Commits from remote we don't have *) 366 + incoming_commits : Git.log_entry list; 367 + (** Commits from remote we don't have *) 347 368 } 369 + (** Information about a single remote's status *) 348 370 349 371 (** Analyze a single remote for a checkout *) 350 372 let analyze_remote ~proc ~fs ~checkout_dir ~remote_name = 351 - let url = match Git.get_remote_url ~proc ~fs ~remote:remote_name checkout_dir with 373 + let url = 374 + match Git.get_remote_url ~proc ~fs ~remote:remote_name checkout_dir with 352 375 | Some u -> u 353 376 | None -> "(unknown)" 354 377 in 355 378 (* Try to get ahead/behind for this remote *) 356 - let (ahead, behind) = match Git.ahead_behind ~proc ~fs ~remote:remote_name checkout_dir with 379 + let ahead, behind = 380 + match Git.ahead_behind ~proc ~fs ~remote:remote_name checkout_dir with 357 381 | Ok ab -> (ab.ahead, ab.behind) 358 382 | Error _ -> (0, 0) 359 383 in ··· 361 385 let incoming_commits = 362 386 if behind > 0 then 363 387 let tip = Printf.sprintf "%s/main" remote_name in 364 - match Git.log_range ~proc ~fs ~base:"HEAD" ~tip ~max_count:20 checkout_dir with 388 
+ match 389 + Git.log_range ~proc ~fs ~base:"HEAD" ~tip ~max_count:20 checkout_dir 390 + with 365 391 | Ok commits -> commits 366 - | Error _ -> 392 + | Error _ -> ( 367 393 (* Try with master branch *) 368 - (match Git.log_range ~proc ~fs ~base:"HEAD" ~tip:(Printf.sprintf "%s/master" remote_name) 369 - ~max_count:20 checkout_dir with 394 + match 395 + Git.log_range ~proc ~fs ~base:"HEAD" 396 + ~tip:(Printf.sprintf "%s/master" remote_name) 397 + ~max_count:20 checkout_dir 398 + with 370 399 | Ok commits -> commits 371 400 | Error _ -> []) 372 401 else [] ··· 376 405 (** Analyze all remotes for a checkout *) 377 406 let analyze_checkout_remotes ~proc ~fs ~checkout_dir = 378 407 let remotes = Git.list_remotes ~proc ~fs checkout_dir in 379 - List.map (fun remote_name -> 380 - analyze_remote ~proc ~fs ~checkout_dir ~remote_name) 408 + List.map 409 + (fun remote_name -> analyze_remote ~proc ~fs ~checkout_dir ~remote_name) 381 410 remotes 382 411 383 412 (** Strip ANSI escape codes from a string *) ··· 400 429 in 401 430 loop 0 402 431 403 - (** Build status summary for prompt - includes formatted monopam status output *) 432 + (** Build status summary for prompt - includes formatted monopam status output 433 + *) 404 434 let build_status_summary statuses = 405 435 let buf = Buffer.create 4096 in 406 436 Buffer.add_string buf "## Current Monorepo Status\n\n"; ··· 410 440 Buffer.add_string buf (strip_ansi fmt_output); 411 441 Buffer.add_string buf "```\n\n"; 412 442 Buffer.add_string buf "Detailed status per repository:\n"; 413 - List.iter (fun (status : Status.t) -> 443 + List.iter 444 + (fun (status : Status.t) -> 414 445 let name = Package.repo_name status.package in 415 - let local_str = match status.subtree_sync with 446 + let local_str = 447 + match status.subtree_sync with 416 448 | Status.In_sync -> "local:=" 417 449 | Status.Subtree_behind n -> Printf.sprintf "local:-%d" n 418 450 | Status.Subtree_ahead n -> Printf.sprintf "local:+%d" n 419 451 | 
Status.Trees_differ -> "local:sync" 420 452 | Status.Unknown -> "local:?" 421 453 in 422 - let remote_str = match status.checkout with 454 + let remote_str = 455 + match status.checkout with 423 456 | Status.Clean ab -> 424 457 if ab.ahead > 0 && ab.behind > 0 then 425 458 Printf.sprintf "remote:+%d/-%d" ab.ahead ab.behind 426 - else if ab.ahead > 0 then 427 - Printf.sprintf "remote:+%d" ab.ahead 428 - else if ab.behind > 0 then 429 - Printf.sprintf "remote:-%d" ab.behind 459 + else if ab.ahead > 0 then Printf.sprintf "remote:+%d" ab.ahead 460 + else if ab.behind > 0 then Printf.sprintf "remote:-%d" ab.behind 430 461 else "remote:=" 431 462 | Status.Dirty -> "remote:dirty" 432 463 | Status.Missing -> "remote:missing" 433 464 | Status.Not_a_repo -> "remote:not-repo" 434 465 in 435 - Buffer.add_string buf (Printf.sprintf "- %s: %s %s\n" name local_str remote_str)) 466 + Buffer.add_string buf 467 + (Printf.sprintf "- %s: %s %s\n" name local_str remote_str)) 436 468 statuses; 437 469 Buffer.contents buf 438 470 ··· 440 472 let build_incoming_summary remotes_by_repo = 441 473 let buf = Buffer.create 8192 in 442 474 Buffer.add_string buf "\n## Incoming Commits from Remotes\n\n"; 443 - List.iter (fun (repo_name, remotes) -> 475 + List.iter 476 + (fun (repo_name, remotes) -> 444 477 let has_incoming = List.exists (fun r -> r.behind > 0) remotes in 445 478 if has_incoming then begin 446 479 Buffer.add_string buf (Printf.sprintf "### %s\n\n" repo_name); 447 - List.iter (fun r -> 480 + List.iter 481 + (fun r -> 448 482 if r.behind > 0 then begin 449 - Buffer.add_string buf (Printf.sprintf "**%s** (%s) - %d commits behind:\n" 450 - r.remote_name r.url r.behind); 451 - List.iter (fun (c : Git.log_entry) -> 452 - let short_hash = String.sub c.hash 0 (min 7 (String.length c.hash)) in 453 - Buffer.add_string buf (Printf.sprintf " - %s %s (%s)\n" 454 - short_hash c.subject c.author)) 483 + Buffer.add_string buf 484 + (Printf.sprintf "**%s** (%s) - %d commits behind:\n" 485 + 
r.remote_name r.url r.behind); 486 + List.iter 487 + (fun (c : Git.log_entry) -> 488 + let short_hash = 489 + String.sub c.hash 0 (min 7 (String.length c.hash)) 490 + in 491 + Buffer.add_string buf 492 + (Printf.sprintf " - %s %s (%s)\n" short_hash c.subject 493 + c.author)) 455 494 r.incoming_commits; 456 495 Buffer.add_string buf "\n" 457 496 end) ··· 461 500 Buffer.contents buf 462 501 463 502 (** Analyze all incoming commits using Claude *) 464 - let analyze_with_claude ~sw ~process_mgr ~clock ~status_summary ~incoming_summary = 503 + let analyze_with_claude ~sw ~process_mgr ~clock ~status_summary 504 + ~incoming_summary = 465 505 let prompt = Buffer.create 16384 in 466 - Buffer.add_string prompt {|You are analyzing a monorepo workspace to provide actionable recommendations. 506 + Buffer.add_string prompt 507 + {|You are analyzing a monorepo workspace to provide actionable recommendations. 467 508 468 509 IMPORTANT: The workspace has already been synced and the status output is provided below. 469 510 You do NOT need to run `monopam status` or `monopam sync` - this has already been done. 
··· 472 513 |}; 473 514 Buffer.add_string prompt status_summary; 474 515 Buffer.add_string prompt incoming_summary; 475 - Buffer.add_string prompt {| 516 + Buffer.add_string prompt 517 + {| 476 518 477 519 ## Instructions 478 520 ··· 506 548 507 549 let output_schema = 508 550 let open Jsont in 509 - let commit_schema = Object ([ 510 - (("type", Meta.none), String ("object", Meta.none)); 511 - (("properties", Meta.none), Object ([ 512 - (("hash", Meta.none), Object ([(("type", Meta.none), String ("string", Meta.none))], Meta.none)); 513 - (("subject", Meta.none), Object ([(("type", Meta.none), String ("string", Meta.none))], Meta.none)); 514 - (("author", Meta.none), Object ([(("type", Meta.none), String ("string", Meta.none))], Meta.none)); 515 - (("date", Meta.none), Object ([(("type", Meta.none), String ("string", Meta.none))], Meta.none)); 516 - (("category", Meta.none), Object ([(("type", Meta.none), String ("string", Meta.none))], Meta.none)); 517 - (("priority", Meta.none), Object ([(("type", Meta.none), String ("string", Meta.none))], Meta.none)); 518 - (("recommendation", Meta.none), Object ([(("type", Meta.none), String ("string", Meta.none))], Meta.none)); 519 - (("conflict_risk", Meta.none), Object ([(("type", Meta.none), String ("string", Meta.none))], Meta.none)); 520 - (("summary", Meta.none), Object ([(("type", Meta.none), String ("string", Meta.none))], Meta.none)); 521 - ], Meta.none)); 522 - ], Meta.none) 551 + let commit_schema = 552 + Object 553 + ( [ 554 + (("type", Meta.none), String ("object", Meta.none)); 555 + ( ("properties", Meta.none), 556 + Object 557 + ( [ 558 + ( ("hash", Meta.none), 559 + Object 560 + ( [ (("type", Meta.none), String ("string", Meta.none)) ], 561 + Meta.none ) ); 562 + ( ("subject", Meta.none), 563 + Object 564 + ( [ (("type", Meta.none), String ("string", Meta.none)) ], 565 + Meta.none ) ); 566 + ( ("author", Meta.none), 567 + Object 568 + ( [ (("type", Meta.none), String ("string", Meta.none)) ], 569 + Meta.none ) 
); 570 + ( ("date", Meta.none), 571 + Object 572 + ( [ (("type", Meta.none), String ("string", Meta.none)) ], 573 + Meta.none ) ); 574 + ( ("category", Meta.none), 575 + Object 576 + ( [ (("type", Meta.none), String ("string", Meta.none)) ], 577 + Meta.none ) ); 578 + ( ("priority", Meta.none), 579 + Object 580 + ( [ (("type", Meta.none), String ("string", Meta.none)) ], 581 + Meta.none ) ); 582 + ( ("recommendation", Meta.none), 583 + Object 584 + ( [ (("type", Meta.none), String ("string", Meta.none)) ], 585 + Meta.none ) ); 586 + ( ("conflict_risk", Meta.none), 587 + Object 588 + ( [ (("type", Meta.none), String ("string", Meta.none)) ], 589 + Meta.none ) ); 590 + ( ("summary", Meta.none), 591 + Object 592 + ( [ (("type", Meta.none), String ("string", Meta.none)) ], 593 + Meta.none ) ); 594 + ], 595 + Meta.none ) ); 596 + ], 597 + Meta.none ) 523 598 in 524 - let verse_schema = Object ([ 525 - (("type", Meta.none), String ("object", Meta.none)); 526 - (("properties", Meta.none), Object ([ 527 - (("handle", Meta.none), Object ([(("type", Meta.none), String ("string", Meta.none))], Meta.none)); 528 - (("commits", Meta.none), Object ([ 529 - (("type", Meta.none), String ("array", Meta.none)); 530 - (("items", Meta.none), commit_schema); 531 - ], Meta.none)); 532 - (("suggested_action", Meta.none), Object ([(("type", Meta.none), String ("string", Meta.none))], Meta.none)); 533 - ], Meta.none)); 534 - ], Meta.none) 599 + let verse_schema = 600 + Object 601 + ( [ 602 + (("type", Meta.none), String ("object", Meta.none)); 603 + ( ("properties", Meta.none), 604 + Object 605 + ( [ 606 + ( ("handle", Meta.none), 607 + Object 608 + ( [ (("type", Meta.none), String ("string", Meta.none)) ], 609 + Meta.none ) ); 610 + ( ("commits", Meta.none), 611 + Object 612 + ( [ 613 + (("type", Meta.none), String ("array", Meta.none)); 614 + (("items", Meta.none), commit_schema); 615 + ], 616 + Meta.none ) ); 617 + ( ("suggested_action", Meta.none), 618 + Object 619 + ( [ (("type", 
Meta.none), String ("string", Meta.none)) ], 620 + Meta.none ) ); 621 + ], 622 + Meta.none ) ); 623 + ], 624 + Meta.none ) 535 625 in 536 - let repo_schema = Object ([ 537 - (("type", Meta.none), String ("object", Meta.none)); 538 - (("properties", Meta.none), Object ([ 539 - (("name", Meta.none), Object ([(("type", Meta.none), String ("string", Meta.none))], Meta.none)); 540 - (("verse_analyses", Meta.none), Object ([ 541 - (("type", Meta.none), String ("array", Meta.none)); 542 - (("items", Meta.none), verse_schema); 543 - ], Meta.none)); 544 - ], Meta.none)); 545 - ], Meta.none) 626 + let repo_schema = 627 + Object 628 + ( [ 629 + (("type", Meta.none), String ("object", Meta.none)); 630 + ( ("properties", Meta.none), 631 + Object 632 + ( [ 633 + ( ("name", Meta.none), 634 + Object 635 + ( [ (("type", Meta.none), String ("string", Meta.none)) ], 636 + Meta.none ) ); 637 + ( ("verse_analyses", Meta.none), 638 + Object 639 + ( [ 640 + (("type", Meta.none), String ("array", Meta.none)); 641 + (("items", Meta.none), verse_schema); 642 + ], 643 + Meta.none ) ); 644 + ], 645 + Meta.none ) ); 646 + ], 647 + Meta.none ) 546 648 in 547 - let action_schema = Object ([ 548 - (("type", Meta.none), String ("object", Meta.none)); 549 - (("properties", Meta.none), Object ([ 550 - (("priority", Meta.none), Object ([(("type", Meta.none), String ("string", Meta.none))], Meta.none)); 551 - (("action", Meta.none), Object ([(("type", Meta.none), String ("string", Meta.none))], Meta.none)); 552 - (("command", Meta.none), Object ([(("type", Meta.none), String ("string", Meta.none))], Meta.none)); 553 - ], Meta.none)); 554 - ], Meta.none) 649 + let action_schema = 650 + Object 651 + ( [ 652 + (("type", Meta.none), String ("object", Meta.none)); 653 + ( ("properties", Meta.none), 654 + Object 655 + ( [ 656 + ( ("priority", Meta.none), 657 + Object 658 + ( [ (("type", Meta.none), String ("string", Meta.none)) ], 659 + Meta.none ) ); 660 + ( ("action", Meta.none), 661 + Object 662 + ( [ 
(("type", Meta.none), String ("string", Meta.none)) ], 663 + Meta.none ) ); 664 + ( ("command", Meta.none), 665 + Object 666 + ( [ (("type", Meta.none), String ("string", Meta.none)) ], 667 + Meta.none ) ); 668 + ], 669 + Meta.none ) ); 670 + ], 671 + Meta.none ) 555 672 in 556 - Object ([ 557 - (("type", Meta.none), String ("object", Meta.none)); 558 - (("properties", Meta.none), Object ([ 559 - (("repos", Meta.none), Object ([ 560 - (("type", Meta.none), String ("array", Meta.none)); 561 - (("items", Meta.none), repo_schema); 562 - ], Meta.none)); 563 - (("recommendations", Meta.none), Object ([ 564 - (("type", Meta.none), String ("array", Meta.none)); 565 - (("items", Meta.none), action_schema); 566 - ], Meta.none)); 567 - (("warnings", Meta.none), Object ([ 568 - (("type", Meta.none), String ("array", Meta.none)); 569 - (("items", Meta.none), Object ([(("type", Meta.none), String ("string", Meta.none))], Meta.none)); 570 - ], Meta.none)); 571 - ], Meta.none)); 572 - (("required", Meta.none), Array ([ 573 - String ("repos", Meta.none); 574 - String ("recommendations", Meta.none); 575 - String ("warnings", Meta.none); 576 - ], Meta.none)); 577 - ], Meta.none) 673 + Object 674 + ( [ 675 + (("type", Meta.none), String ("object", Meta.none)); 676 + ( ("properties", Meta.none), 677 + Object 678 + ( [ 679 + ( ("repos", Meta.none), 680 + Object 681 + ( [ 682 + (("type", Meta.none), String ("array", Meta.none)); 683 + (("items", Meta.none), repo_schema); 684 + ], 685 + Meta.none ) ); 686 + ( ("recommendations", Meta.none), 687 + Object 688 + ( [ 689 + (("type", Meta.none), String ("array", Meta.none)); 690 + (("items", Meta.none), action_schema); 691 + ], 692 + Meta.none ) ); 693 + ( ("warnings", Meta.none), 694 + Object 695 + ( [ 696 + (("type", Meta.none), String ("array", Meta.none)); 697 + ( ("items", Meta.none), 698 + Object 699 + ( [ 700 + ( ("type", Meta.none), 701 + String ("string", Meta.none) ); 702 + ], 703 + Meta.none ) ); 704 + ], 705 + Meta.none ) ); 706 + 
], 707 + Meta.none ) ); 708 + ( ("required", Meta.none), 709 + Array 710 + ( [ 711 + String ("repos", Meta.none); 712 + String ("recommendations", Meta.none); 713 + String ("warnings", Meta.none); 714 + ], 715 + Meta.none ) ); 716 + ], 717 + Meta.none ) 718 + in 719 + let output_format = 720 + Claude.Proto.Structured_output.of_json_schema output_schema 578 721 in 579 - let output_format = Claude.Proto.Structured_output.of_json_schema output_schema in 580 722 let options = 581 - Claude.Options.default 582 - |> Claude.Options.with_output_format output_format 723 + Claude.Options.default |> Claude.Options.with_output_format output_format 583 724 in 584 725 585 726 let client = Claude.Client.create ~sw ~process_mgr ~clock ~options () in ··· 587 728 588 729 (* Stream Claude's activity to console *) 589 730 let result = ref None in 590 - let handler = object 591 - inherit Claude.Handler.default 731 + let handler = 732 + object 733 + inherit Claude.Handler.default 592 734 593 - method! on_text t = 594 - let content = Claude.Response.Text.content t in 595 - if String.length content > 0 then 596 - Log.app (fun m -> m "Claude: %s" content) 735 + method! on_text t = 736 + let content = Claude.Response.Text.content t in 737 + if String.length content > 0 then 738 + Log.app (fun m -> m "Claude: %s" content) 597 739 598 - method! on_tool_use t = 599 - let name = Claude.Response.Tool_use.name t in 600 - let input = Claude.Response.Tool_use.input t in 601 - (* Show tool being used with key parameters *) 602 - (match name with 603 - | "Bash" -> 604 - let cmd = Claude.Tool_input.get_string input "command" |> Option.value ~default:"" in 605 - let short_cmd = if String.length cmd > 60 then String.sub cmd 0 57 ^ "..." 
else cmd in 606 - Log.app (fun m -> m " [Bash] %s" short_cmd) 607 - | "Read" -> 608 - let path = Claude.Tool_input.get_string input "file_path" |> Option.value ~default:"" in 609 - Log.app (fun m -> m " [Read] %s" path) 610 - | "Grep" -> 611 - let pattern = Claude.Tool_input.get_string input "pattern" |> Option.value ~default:"" in 612 - Log.app (fun m -> m " [Grep] %s" pattern) 613 - | "Glob" -> 614 - let pattern = Claude.Tool_input.get_string input "pattern" |> Option.value ~default:"" in 615 - Log.app (fun m -> m " [Glob] %s" pattern) 616 - | _ -> 617 - Log.app (fun m -> m " [%s]" name)) 740 + method! on_tool_use t = 741 + let name = Claude.Response.Tool_use.name t in 742 + let input = Claude.Response.Tool_use.input t in 743 + (* Show tool being used with key parameters *) 744 + match name with 745 + | "Bash" -> 746 + let cmd = 747 + Claude.Tool_input.get_string input "command" 748 + |> Option.value ~default:"" 749 + in 750 + let short_cmd = 751 + if String.length cmd > 60 then String.sub cmd 0 57 ^ "..." 752 + else cmd 753 + in 754 + Log.app (fun m -> m " [Bash] %s" short_cmd) 755 + | "Read" -> 756 + let path = 757 + Claude.Tool_input.get_string input "file_path" 758 + |> Option.value ~default:"" 759 + in 760 + Log.app (fun m -> m " [Read] %s" path) 761 + | "Grep" -> 762 + let pattern = 763 + Claude.Tool_input.get_string input "pattern" 764 + |> Option.value ~default:"" 765 + in 766 + Log.app (fun m -> m " [Grep] %s" pattern) 767 + | "Glob" -> 768 + let pattern = 769 + Claude.Tool_input.get_string input "pattern" 770 + |> Option.value ~default:"" 771 + in 772 + Log.app (fun m -> m " [Glob] %s" pattern) 773 + | _ -> Log.app (fun m -> m " [%s]" name) 618 774 619 - method! on_complete c = 620 - match Claude.Response.Complete.structured_output c with 621 - | Some json -> result := Some json 622 - | None -> Log.warn (fun m -> m "No structured output from Claude") 775 + method! 
on_complete c = 776 + match Claude.Response.Complete.structured_output c with 777 + | Some json -> result := Some json 778 + | None -> Log.warn (fun m -> m "No structured output from Claude") 623 779 624 - method! on_error e = 625 - Log.warn (fun m -> m "Claude error: %s" (Claude.Response.Error.message e)) 626 - end in 780 + method! on_error e = 781 + Log.warn (fun m -> 782 + m "Claude error: %s" (Claude.Response.Error.message e)) 783 + end 784 + in 627 785 628 786 Claude.Client.run client ~handler; 629 787 !result ··· 655 813 (match json with 656 814 | Jsont.Object (obj, _) -> 657 815 (* Parse repos *) 658 - List.iter (fun repo_json -> 816 + List.iter 817 + (fun repo_json -> 659 818 match repo_json with 660 819 | Jsont.Object (repo_obj, _) -> 661 820 let name = get_string repo_obj "name" "" in 662 - let verse_analyses = List.filter_map (fun va_json -> 663 - match va_json with 664 - | Jsont.Object (va_obj, _) -> 665 - let handle = get_string va_obj "handle" "" in 666 - let commits = List.filter_map (fun c_json -> 667 - match c_json with 668 - | Jsont.Object (c_obj, _) -> 669 - Some { 670 - hash = get_string c_obj "hash" ""; 671 - subject = get_string c_obj "subject" ""; 672 - author = get_string c_obj "author" ""; 673 - date = get_string c_obj "date" ""; 674 - category = change_category_of_string (get_string c_obj "category" "other"); 675 - priority = priority_of_string (get_string c_obj "priority" "low"); 676 - recommendation = recommendation_of_string (get_string c_obj "recommendation" "review-first"); 677 - conflict_risk = conflict_risk_of_string (get_string c_obj "conflict_risk" "low"); 678 - commit_summary = get_string c_obj "summary" ""; 679 - } 680 - | _ -> None) 681 - (get_array va_obj "commits") 682 - in 683 - let suggested_action = get_string_opt va_obj "suggested_action" in 684 - Some { handle; commits; suggested_action } 685 - | _ -> None) 686 - (get_array repo_obj "verse_analyses") 821 + let verse_analyses = 822 + List.filter_map 823 + (fun va_json -> 
824 + match va_json with 825 + | Jsont.Object (va_obj, _) -> 826 + let handle = get_string va_obj "handle" "" in 827 + let commits = 828 + List.filter_map 829 + (fun c_json -> 830 + match c_json with 831 + | Jsont.Object (c_obj, _) -> 832 + Some 833 + { 834 + hash = get_string c_obj "hash" ""; 835 + subject = get_string c_obj "subject" ""; 836 + author = get_string c_obj "author" ""; 837 + date = get_string c_obj "date" ""; 838 + category = 839 + change_category_of_string 840 + (get_string c_obj "category" "other"); 841 + priority = 842 + priority_of_string 843 + (get_string c_obj "priority" "low"); 844 + recommendation = 845 + recommendation_of_string 846 + (get_string c_obj "recommendation" 847 + "review-first"); 848 + conflict_risk = 849 + conflict_risk_of_string 850 + (get_string c_obj "conflict_risk" 851 + "low"); 852 + commit_summary = 853 + get_string c_obj "summary" ""; 854 + } 855 + | _ -> None) 856 + (get_array va_obj "commits") 857 + in 858 + let suggested_action = 859 + get_string_opt va_obj "suggested_action" 860 + in 861 + Some { handle; commits; suggested_action } 862 + | _ -> None) 863 + (get_array repo_obj "verse_analyses") 687 864 in 688 865 if verse_analyses <> [] then 689 - repos := { name; local_sync = `In_sync; remote_ahead = 0; remote_behind = 0; verse_analyses } :: !repos 866 + repos := 867 + { 868 + name; 869 + local_sync = `In_sync; 870 + remote_ahead = 0; 871 + remote_behind = 0; 872 + verse_analyses; 873 + } 874 + :: !repos 690 875 | _ -> ()) 691 876 (get_array obj "repos"); 692 877 693 878 (* Parse recommendations *) 694 - List.iter (fun rec_json -> 879 + List.iter 880 + (fun rec_json -> 695 881 match rec_json with 696 882 | Jsont.Object (rec_obj, _) -> 697 - let action_priority = priority_of_string (get_string rec_obj "priority" "low") in 883 + let action_priority = 884 + priority_of_string (get_string rec_obj "priority" "low") 885 + in 698 886 let description = get_string rec_obj "action" "" in 699 887 let command = get_string_opt 
rec_obj "command" in 700 - recommendations := { action_priority; description; command } :: !recommendations 888 + recommendations := 889 + { action_priority; description; command } :: !recommendations 701 890 | _ -> ()) 702 891 (get_array obj "recommendations"); 703 892 704 893 (* Parse warnings *) 705 - List.iter (fun w_json -> 894 + List.iter 895 + (fun w_json -> 706 896 match w_json with 707 897 | Jsont.String (s, _) -> warnings := s :: !warnings 708 898 | _ -> ()) ··· 714 904 (** {1 Main Analysis} *) 715 905 716 906 (** Run the doctor analysis *) 717 - let analyze 718 - ~proc ~fs ~config ~verse_config ~clock 719 - ?package ?(no_sync=false) () = 720 - let _ = no_sync in (* Sync is run at CLI level before calling analyze *) 907 + let analyze ~proc ~fs ~config ~verse_config ~clock ?package ?(no_sync = false) 908 + () = 909 + let _ = no_sync in 910 + (* Sync is run at CLI level before calling analyze *) 721 911 let now = Eio.Time.now clock in 722 - let now_ptime = match Ptime.of_float_s now with 723 - | Some t -> t 724 - | None -> Ptime.v (0, 0L) 912 + let now_ptime = 913 + match Ptime.of_float_s now with Some t -> t | None -> Ptime.v (0, 0L) 725 914 in 726 915 let timestamp = Ptime.to_rfc3339 now_ptime ~tz_offset_s:0 in 727 916 let workspace = Fpath.to_string (Verse_config.root verse_config) in 728 917 729 918 (* Get status for all packages *) 730 - let packages = match Opam_repo.scan ~fs:(fs :> _ Eio.Path.t) (Config.Paths.opam_repo config) with 919 + let packages = 920 + match 921 + Opam_repo.scan ~fs:(fs :> _ Eio.Path.t) (Config.Paths.opam_repo config) 922 + with 731 923 | Ok pkgs -> pkgs 732 924 | Error _ -> [] 733 925 in 734 926 let statuses = Status.compute_all ~proc ~fs ~config packages in 735 927 736 928 (* Filter by package if specified *) 737 - let statuses = match package with 929 + let statuses = 930 + match package with 738 931 | None -> statuses 739 - | Some name -> List.filter (fun (s : Status.t) -> Package.name s.package = name) statuses 932 + | 
Some name -> 933 + List.filter 934 + (fun (s : Status.t) -> Package.name s.package = name) 935 + statuses 740 936 in 741 937 742 938 (* Build warnings list *) ··· 753 949 warnings := "monorepo has uncommitted changes" :: !warnings; 754 950 755 951 (* Analyze all remotes for each checkout *) 756 - Log.app (fun m -> m "Analyzing remotes for %d repositories..." (List.length statuses)); 952 + Log.app (fun m -> 953 + m "Analyzing remotes for %d repositories..." (List.length statuses)); 757 954 let checkouts_root = Config.Paths.checkouts config in 758 - let remotes_by_repo = List.filter_map (fun (status : Status.t) -> 759 - let name = Package.repo_name status.package in 760 - let checkout_dir = Fpath.(checkouts_root / name) in 761 - match status.checkout with 762 - | Status.Missing | Status.Not_a_repo -> None 763 - | _ -> 764 - let remotes = analyze_checkout_remotes ~proc ~fs ~checkout_dir in 765 - Some (name, remotes)) 766 - statuses 955 + let remotes_by_repo = 956 + List.filter_map 957 + (fun (status : Status.t) -> 958 + let name = Package.repo_name status.package in 959 + let checkout_dir = Fpath.(checkouts_root / name) in 960 + match status.checkout with 961 + | Status.Missing | Status.Not_a_repo -> None 962 + | _ -> 963 + let remotes = analyze_checkout_remotes ~proc ~fs ~checkout_dir in 964 + Some (name, remotes)) 965 + statuses 767 966 in 768 967 769 968 (* Count repos with incoming changes *) 770 - let repos_with_incoming = List.filter (fun (_name, remotes) -> 771 - List.exists (fun r -> r.behind > 0) remotes) 772 - remotes_by_repo 969 + let repos_with_incoming = 970 + List.filter 971 + (fun (_name, remotes) -> List.exists (fun r -> r.behind > 0) remotes) 972 + remotes_by_repo 773 973 in 774 974 775 975 (* Build repo sync info from status *) 776 - let base_repos = List.map (fun (status : Status.t) -> 777 - let name = Package.repo_name status.package in 778 - let local_sync = match status.subtree_sync with 779 - | Status.In_sync -> `In_sync 780 - | 
Status.Subtree_behind n -> `Behind n 781 - | Status.Subtree_ahead n -> `Ahead n 782 - | Status.Trees_differ -> `Needs_sync 783 - | Status.Unknown -> `Needs_sync 784 - in 785 - let (remote_ahead, remote_behind) = match status.checkout with 786 - | Status.Clean ab -> (ab.ahead, ab.behind) 787 - | _ -> (0, 0) 788 - in 789 - { name; local_sync; remote_ahead; remote_behind; verse_analyses = [] }) 790 - statuses 976 + let base_repos = 977 + List.map 978 + (fun (status : Status.t) -> 979 + let name = Package.repo_name status.package in 980 + let local_sync = 981 + match status.subtree_sync with 982 + | Status.In_sync -> `In_sync 983 + | Status.Subtree_behind n -> `Behind n 984 + | Status.Subtree_ahead n -> `Ahead n 985 + | Status.Trees_differ -> `Needs_sync 986 + | Status.Unknown -> `Needs_sync 987 + in 988 + let remote_ahead, remote_behind = 989 + match status.checkout with 990 + | Status.Clean ab -> (ab.ahead, ab.behind) 991 + | _ -> (0, 0) 992 + in 993 + { name; local_sync; remote_ahead; remote_behind; verse_analyses = [] }) 994 + statuses 791 995 in 792 996 793 997 (* If there are repos with incoming changes, analyze with Claude *) 794 - let (repos, claude_recommendations, claude_warnings) = 998 + let repos, claude_recommendations, claude_warnings = 795 999 if repos_with_incoming <> [] then begin 796 - Log.app (fun m -> m "Found %d repos with incoming changes, analyzing with Claude..." 797 - (List.length repos_with_incoming)); 1000 + Log.app (fun m -> 1001 + m "Found %d repos with incoming changes, analyzing with Claude..." 
1002 + (List.length repos_with_incoming)); 798 1003 let status_summary = build_status_summary statuses in 799 1004 let incoming_summary = build_incoming_summary remotes_by_repo in 800 1005 801 - match Eio.Switch.run (fun sw -> 802 - analyze_with_claude ~sw ~process_mgr:proc ~clock ~status_summary ~incoming_summary) 1006 + match 1007 + Eio.Switch.run (fun sw -> 1008 + analyze_with_claude ~sw ~process_mgr:proc ~clock ~status_summary 1009 + ~incoming_summary) 803 1010 with 804 1011 | Some json -> 805 - let (claude_repos, recs, warns) = parse_claude_response json in 1012 + let claude_repos, recs, warns = parse_claude_response json in 806 1013 (* Merge Claude repos with base repos *) 807 - let merged_repos = List.map (fun base_repo -> 808 - match List.find_opt (fun cr -> cr.name = base_repo.name) claude_repos with 809 - | Some cr -> { base_repo with verse_analyses = cr.verse_analyses } 810 - | None -> base_repo) 811 - base_repos 1014 + let merged_repos = 1015 + List.map 1016 + (fun base_repo -> 1017 + match 1018 + List.find_opt 1019 + (fun cr -> cr.name = base_repo.name) 1020 + claude_repos 1021 + with 1022 + | Some cr -> 1023 + { base_repo with verse_analyses = cr.verse_analyses } 1024 + | None -> base_repo) 1025 + base_repos 812 1026 in 813 1027 (merged_repos, recs, warns) 814 1028 | None -> 815 1029 Log.warn (fun m -> m "Claude analysis failed, using basic status"); 816 1030 (base_repos, [], []) 817 - end else begin 1031 + end 1032 + else begin 818 1033 Log.app (fun m -> m "No incoming changes from remotes"); 819 1034 (base_repos, [], []) 820 1035 end 821 1036 in 822 - 823 1037 824 1038 (* Compute summary *) 825 - let repos_need_sync = List.length (List.filter (fun r -> r.local_sync <> `In_sync) repos) in 826 - let repos_behind_upstream = List.length (List.filter (fun r -> r.remote_behind > 0) repos) in 827 - let verse_divergences = List.fold_left (fun acc r -> acc + List.length r.verse_analyses) 0 repos in 828 - let report_summary = { 829 - repos_total = List.length 
repos; 830 - repos_need_sync; 831 - repos_behind_upstream; 832 - verse_divergences; 833 - } in 1039 + let repos_need_sync = 1040 + List.length (List.filter (fun r -> r.local_sync <> `In_sync) repos) 1041 + in 1042 + let repos_behind_upstream = 1043 + List.length (List.filter (fun r -> r.remote_behind > 0) repos) 1044 + in 1045 + let verse_divergences = 1046 + List.fold_left (fun acc r -> acc + List.length r.verse_analyses) 0 repos 1047 + in 1048 + let report_summary = 1049 + { 1050 + repos_total = List.length repos; 1051 + repos_need_sync; 1052 + repos_behind_upstream; 1053 + verse_divergences; 1054 + } 1055 + in 834 1056 835 1057 (* Build recommendations: start with Claude's, add our own *) 836 1058 let recommendations = ref claude_recommendations in 837 1059 838 1060 (* Add recommendations for local sync issues *) 839 - if repos_need_sync > 0 && not (List.exists (fun r -> 840 - String.starts_with ~prefix:"Run monopam sync" r.description) !recommendations) then 841 - recommendations := { 842 - action_priority = Medium; 843 - description = Printf.sprintf "Run monopam sync to resolve %d local sync issues" repos_need_sync; 844 - command = Some "monopam sync"; 845 - } :: !recommendations; 1061 + if 1062 + repos_need_sync > 0 1063 + && not 1064 + (List.exists 1065 + (fun r -> 1066 + String.starts_with ~prefix:"Run monopam sync" r.description) 1067 + !recommendations) 1068 + then 1069 + recommendations := 1070 + { 1071 + action_priority = Medium; 1072 + description = 1073 + Printf.sprintf "Run monopam sync to resolve %d local sync issues" 1074 + repos_need_sync; 1075 + command = Some "monopam sync"; 1076 + } 1077 + :: !recommendations; 846 1078 847 1079 (* Add recommendations for repos behind upstream *) 848 - if repos_behind_upstream > 0 && not (List.exists (fun r -> 849 - String.starts_with ~prefix:"Pull upstream" r.description) !recommendations) then 850 - recommendations := { 851 - action_priority = Medium; 852 - description = Printf.sprintf "Pull upstream changes 
for %d repos" repos_behind_upstream; 853 - command = Some "monopam sync"; 854 - } :: !recommendations; 1080 + if 1081 + repos_behind_upstream > 0 1082 + && not 1083 + (List.exists 1084 + (fun r -> String.starts_with ~prefix:"Pull upstream" r.description) 1085 + !recommendations) 1086 + then 1087 + recommendations := 1088 + { 1089 + action_priority = Medium; 1090 + description = 1091 + Printf.sprintf "Pull upstream changes for %d repos" 1092 + repos_behind_upstream; 1093 + command = Some "monopam sync"; 1094 + } 1095 + :: !recommendations; 855 1096 856 1097 (* Sort recommendations by priority *) 857 1098 let priority_order = function 858 - | Critical -> 0 | High -> 1 | Medium -> 2 | Low -> 3 1099 + | Critical -> 0 1100 + | High -> 1 1101 + | Medium -> 2 1102 + | Low -> 3 859 1103 in 860 - let recommendations = List.sort (fun a b -> 861 - compare (priority_order a.action_priority) (priority_order b.action_priority)) 862 - !recommendations 1104 + let recommendations = 1105 + List.sort 1106 + (fun a b -> 1107 + compare 1108 + (priority_order a.action_priority) 1109 + (priority_order b.action_priority)) 1110 + !recommendations 863 1111 in 864 1112 865 1113 let all_warnings = List.rev !warnings @ claude_warnings in 866 - { timestamp; workspace; report_summary; repos; recommendations; warnings = all_warnings } 1114 + { 1115 + timestamp; 1116 + workspace; 1117 + report_summary; 1118 + repos; 1119 + recommendations; 1120 + warnings = all_warnings; 1121 + } 867 1122 868 1123 (** Encode report to JSON string *) 869 1124 let to_json report = 870 - match Jsont_bytesrw.encode_string ~format:Jsont.Indent report_jsont report with 1125 + match 1126 + Jsont_bytesrw.encode_string ~format:Jsont.Indent report_jsont report 1127 + with 871 1128 | Ok s -> s 872 1129 | Error e -> failwith (Printf.sprintf "Failed to encode report: %s" e)
+18 -32
lib/doctor.mli
··· 1 1 (** Doctor command - Claude-powered workspace health analysis. 2 2 3 - Analyzes workspace state, verse member commits, and provides 4 - actionable recommendations for maintaining your monorepo. 3 + Analyzes workspace state, verse member commits, and provides actionable 4 + recommendations for maintaining your monorepo. 5 5 6 6 The doctor command uses Claude AI to analyze commits from verse 7 7 collaborators, categorizing them by type, priority, and risk level. ··· 37 37 | Other 38 38 39 39 (** Priority level for a change *) 40 - type priority = 41 - | Critical 42 - | High 43 - | Medium 44 - | Low 40 + type priority = Critical | High | Medium | Low 45 41 46 42 (** Recommended action for a commit *) 47 - type recommendation = 48 - | Merge_now 49 - | Review_first 50 - | Skip 51 - | Needs_discussion 43 + type recommendation = Merge_now | Review_first | Skip | Needs_discussion 52 44 53 45 (** Risk of conflicts when merging *) 54 - type conflict_risk = 55 - | None_risk 56 - | Low_risk 57 - | Medium_risk 58 - | High_risk 46 + type conflict_risk = None_risk | Low_risk | Medium_risk | High_risk 59 47 60 - (** Analysis of a single commit from a verse member *) 61 48 type commit_analysis = { 62 49 hash : string; 63 50 subject : string; ··· 69 56 conflict_risk : conflict_risk; 70 57 commit_summary : string; 71 58 } 59 + (** Analysis of a single commit from a verse member *) 72 60 73 - (** Analysis of commits from a specific verse member for a repo *) 74 61 type verse_analysis = { 75 62 handle : string; 76 63 commits : commit_analysis list; 77 64 suggested_action : string option; 78 65 } 66 + (** Analysis of commits from a specific verse member for a repo *) 79 67 80 - (** Sync status for a single repository *) 81 68 type repo_sync = { 82 69 name : string; 83 70 local_sync : [ `In_sync | `Ahead of int | `Behind of int | `Needs_sync ]; ··· 85 72 remote_behind : int; 86 73 verse_analyses : verse_analysis list; 87 74 } 75 + (** Sync status for a single repository *) 88 76 
89 - (** Summary statistics *) 90 77 type report_summary = { 91 78 repos_total : int; 92 79 repos_need_sync : int; 93 80 repos_behind_upstream : int; 94 81 verse_divergences : int; 95 82 } 83 + (** Summary statistics *) 96 84 97 - (** Actionable recommendation *) 98 85 type action = { 99 86 action_priority : priority; 100 87 description : string; 101 88 command : string option; 102 89 } 90 + (** Actionable recommendation *) 103 91 104 - (** Full doctor report *) 105 92 type report = { 106 93 timestamp : string; 107 94 workspace : string; ··· 110 97 recommendations : action list; 111 98 warnings : string list; 112 99 } 100 + (** Full doctor report *) 113 101 114 102 (** {1 Pretty Printing} *) 115 103 ··· 166 154 By default, runs [monopam sync] first to ensure the workspace is up-to-date 167 155 before analysis. Use [~no_sync:true] to skip the initial sync. 168 156 169 - Performs the following analysis: 170 - 1. Runs sync to update workspace (unless [~no_sync:true]) 171 - 2. Computes status for all packages (or the specified package) 172 - 3. Checks for dirty state in opam-repo and monorepo 173 - 4. Analyzes fork relationships with verse members 174 - 5. Uses Claude AI to categorize and prioritize verse commits 175 - 6. Generates actionable recommendations 157 + Performs the following analysis: 1. Runs sync to update workspace (unless 158 + [~no_sync:true]) 2. Computes status for all packages (or the specified 159 + package) 3. Checks for dirty state in opam-repo and monorepo 4. Analyzes 160 + fork relationships with verse members 5. Uses Claude AI to categorize and 161 + prioritize verse commits 6. Generates actionable recommendations 176 162 177 - The status output from [monopam status] is provided directly to Claude 178 - in the prompt, so Claude doesn't need to run it separately. 163 + The status output from [monopam status] is provided directly to Claude in 164 + the prompt, so Claude doesn't need to run it separately. 
179 165 180 166 @param proc Eio process manager 181 167 @param fs Eio filesystem
+14 -1
lib/dune
··· 1 1 (library 2 2 (name monopam) 3 3 (public_name monopam) 4 - (libraries eio tomlt tomlt.eio xdge opam-file-format fmt logs uri fpath claude jsont jsont.bytesrw ptime)) 4 + (libraries 5 + eio 6 + tomlt 7 + tomlt.eio 8 + xdge 9 + opam-file-format 10 + fmt 11 + logs 12 + uri 13 + fpath 14 + claude 15 + jsont 16 + jsont.bytesrw 17 + ptime))
+198 -135
lib/forks.ml
··· 1 1 (** Fork graph discovery via verse opam repos. 2 2 3 - Scans verse opam repos to discover dev-repo URLs, adds git remotes 4 - to local checkouts, and computes fork relationships. *) 3 + Scans verse opam repos to discover dev-repo URLs, adds git remotes to local 4 + checkouts, and computes fork relationships. *) 5 5 6 6 let src = Logs.Src.create "monopam.forks" ~doc:"Fork analysis" 7 + 7 8 module Log = (val Logs.src_log src : Logs.LOG) 8 9 9 - (** A dev-repo source from a specific member *) 10 10 type repo_source = { 11 - handle : string; (** Member handle or "me" *) 12 - url : Uri.t; (** Normalized git URL *) 13 - packages : string list; (** Opam packages from this repo *) 11 + handle : string; (** Member handle or "me" *) 12 + url : Uri.t; (** Normalized git URL *) 13 + packages : string list; (** Opam packages from this repo *) 14 14 } 15 + (** A dev-repo source from a specific member *) 15 16 16 17 (** Fork relationship between two sources *) 17 18 type relationship = 18 - | Same_url (** Same git URL *) 19 - | Same_commit (** Different URLs but same HEAD *) 20 - | I_am_ahead of int (** They forked from me, I'm N commits ahead *) 21 - | I_am_behind of int (** I forked from them, they're N commits ahead *) 19 + | Same_url (** Same git URL *) 20 + | Same_commit (** Different URLs but same HEAD *) 21 + | I_am_ahead of int (** They forked from me, I'm N commits ahead *) 22 + | I_am_behind of int (** I forked from them, they're N commits ahead *) 22 23 | Diverged of { common_ancestor : string; my_ahead : int; their_ahead : int } 23 - | Unrelated (** No common history *) 24 - | Not_fetched (** Remote not yet fetched *) 24 + | Unrelated (** No common history *) 25 + | Not_fetched (** Remote not yet fetched *) 25 26 26 - (** Analysis result for a single repository *) 27 27 type repo_analysis = { 28 - repo_name : string; (** Repository basename *) 29 - my_source : repo_source option; (** My dev-repo if I have it *) 28 + repo_name : string; (** Repository basename 
*) 29 + my_source : repo_source option; (** My dev-repo if I have it *) 30 30 verse_sources : (string * repo_source * relationship) list; 31 - (** (handle, source, relationship to me) *) 31 + (** (handle, source, relationship to me) *) 32 32 } 33 + (** Analysis result for a single repository *) 33 34 35 + type t = { repos : repo_analysis list } 34 36 (** Full fork analysis result *) 35 - type t = { 36 - repos : repo_analysis list; 37 - } 38 37 39 38 let pp_relationship ppf = function 40 39 | Same_url -> Fmt.string ppf "same URL" ··· 46 45 | Unrelated -> Fmt.string ppf "unrelated" 47 46 | Not_fetched -> Fmt.string ppf "not fetched" 48 47 49 - let pp_repo_source ppf src = 50 - Fmt.pf ppf "%s" (Uri.to_string src.url) 48 + let pp_repo_source ppf src = Fmt.pf ppf "%s" (Uri.to_string src.url) 51 49 52 50 let pp_repo_analysis ppf analysis = 53 51 Fmt.pf ppf "@[<v 2>%s:@," analysis.repo_name; ··· 81 79 | I_am_ahead n -> Fmt.(styled `Cyan (fun ppf -> pf ppf "-%d")) ppf n 82 80 | I_am_behind n -> Fmt.(styled `Red (fun ppf -> pf ppf "+%d")) ppf n 83 81 | Diverged { common_ancestor = _; my_ahead; their_ahead } -> 84 - Fmt.(styled `Yellow (fun ppf (a, b) -> pf ppf "+%d/-%d" a b)) ppf (their_ahead, my_ahead) 82 + Fmt.(styled `Yellow (fun ppf (a, b) -> pf ppf "+%d/-%d" a b)) 83 + ppf (their_ahead, my_ahead) 85 84 | Unrelated -> Fmt.(styled `Magenta string) ppf "?" 
86 85 | Not_fetched -> Fmt.(styled `Faint string) ppf "~" 87 86 ··· 91 90 List.filter (fun (_, _, rel) -> is_actionable rel) analysis.verse_sources 92 91 in 93 92 let in_sync = 94 - List.for_all (fun (_, _, rel) -> 95 - match rel with Same_url | Same_commit -> true | _ -> false) 93 + List.for_all 94 + (fun (_, _, rel) -> 95 + match rel with Same_url | Same_commit -> true | _ -> false) 96 96 analysis.verse_sources 97 97 in 98 98 let all_not_fetched = 99 - List.for_all (fun (_, _, rel) -> 100 - match rel with Not_fetched -> true | _ -> false) 99 + List.for_all 100 + (fun (_, _, rel) -> match rel with Not_fetched -> true | _ -> false) 101 101 analysis.verse_sources 102 102 in 103 103 (actionable, in_sync, all_not_fetched) ··· 106 106 let abbrev_handle h = 107 107 (* Use first part before dot, max 3 chars *) 108 108 match String.split_on_char '.' h with 109 - | first :: _ -> if String.length first <= 4 then first else String.sub first 0 3 109 + | first :: _ -> 110 + if String.length first <= 4 then first else String.sub first 0 3 110 111 | [] -> h 111 112 112 113 (** Print a list of (handle, rel) pairs with colors *) 113 114 let pp_changes ppf actionable = 114 115 let first = ref true in 115 - List.iter (fun (h, _, rel) -> 116 - if not !first then Fmt.pf ppf " "; 117 - first := false; 118 - Fmt.pf ppf "%s%a" (abbrev_handle h) pp_rel_short rel) 116 + List.iter 117 + (fun (h, _, rel) -> 118 + if not !first then Fmt.pf ppf " "; 119 + first := false; 120 + Fmt.pf ppf "%s%a" (abbrev_handle h) pp_rel_short rel) 119 121 actionable 120 122 121 123 (** Succinct summary: dense one-line-per-repo format *) ··· 127 129 let in_sync = ref [] in 128 130 let not_mine = ref [] in 129 131 130 - List.iter (fun r -> 131 - let (actionable, is_in_sync, _) = summarize_repo r in 132 - match r.my_source with 133 - | None -> 134 - not_mine := r :: !not_mine 135 - | Some _ when actionable <> [] -> 136 - with_actions := (r, actionable) :: !with_actions 137 - | Some _ when is_in_sync -> 138 - 
in_sync := r :: !in_sync 139 - | Some _ -> 140 - (* Has verse sources but all same URL - treat as in sync *) 141 - in_sync := r :: !in_sync) 132 + List.iter 133 + (fun r -> 134 + let actionable, is_in_sync, _ = summarize_repo r in 135 + match r.my_source with 136 + | None -> not_mine := r :: !not_mine 137 + | Some _ when actionable <> [] -> 138 + with_actions := (r, actionable) :: !with_actions 139 + | Some _ when is_in_sync -> in_sync := r :: !in_sync 140 + | Some _ -> 141 + (* Has verse sources but all same URL - treat as in sync *) 142 + in_sync := r :: !in_sync) 142 143 t.repos; 143 144 144 145 (* Print header with counts *) ··· 146 147 let sync_count = List.length !in_sync in 147 148 let other_count = List.length !not_mine in 148 149 Fmt.pf ppf "%a %a need attention, %a synced, %a others\n" 149 - Fmt.(styled `Bold string) "Verse:" 150 - Fmt.(styled (if action_count > 0 then `Red else `Green) int) action_count 151 - Fmt.(styled `Green int) sync_count 152 - Fmt.(styled `Faint int) other_count; 150 + Fmt.(styled `Bold string) 151 + "Verse:" 152 + Fmt.(styled (if action_count > 0 then `Red else `Green) int) 153 + action_count 154 + Fmt.(styled `Green int) 155 + sync_count 156 + Fmt.(styled `Faint int) 157 + other_count; 153 158 154 159 (* Print repos needing attention - dense format *) 155 160 if !with_actions <> [] then 156 - List.iter (fun (r, actionable) -> 157 - Fmt.pf ppf " %-22s %a\n" r.repo_name pp_changes actionable) 161 + List.iter 162 + (fun (r, actionable) -> 163 + Fmt.pf ppf " %-22s %a\n" r.repo_name pp_changes actionable) 158 164 (List.rev !with_actions); 159 165 160 166 (* Print in-sync repos if show_all *) 161 167 if show_all && !in_sync <> [] then begin 162 - let in_sync_sorted = List.sort (fun a b -> String.compare a.repo_name b.repo_name) !in_sync in 163 - List.iter (fun r -> 164 - Fmt.pf ppf " %-22s %a\n" r.repo_name Fmt.(styled `Green string) "=") 168 + let in_sync_sorted = 169 + List.sort (fun a b -> String.compare a.repo_name b.repo_name) 
!in_sync 170 + in 171 + List.iter 172 + (fun r -> 173 + Fmt.pf ppf " %-22s %a\n" r.repo_name Fmt.(styled `Green string) "=") 165 174 in_sync_sorted 166 175 end; 167 176 ··· 169 178 if !not_mine <> [] then begin 170 179 if show_all then begin 171 180 (* List each repo with ~ *) 172 - let not_mine_sorted = List.sort (fun a b -> String.compare a.repo_name b.repo_name) !not_mine in 173 - List.iter (fun r -> 174 - let handles = List.map (fun (h, _, _) -> abbrev_handle h) r.verse_sources 175 - |> List.sort_uniq String.compare in 176 - Fmt.pf ppf " %-22s %a\n" r.repo_name 177 - Fmt.(styled `Faint (fun ppf s -> pf ppf "%s~" s)) (String.concat "," handles)) 181 + let not_mine_sorted = 182 + List.sort 183 + (fun a b -> String.compare a.repo_name b.repo_name) 184 + !not_mine 185 + in 186 + List.iter 187 + (fun r -> 188 + let handles = 189 + List.map (fun (h, _, _) -> abbrev_handle h) r.verse_sources 190 + |> List.sort_uniq String.compare 191 + in 192 + Fmt.pf ppf " %-22s %a\n" r.repo_name 193 + Fmt.(styled `Faint (fun ppf s -> pf ppf "%s~" s)) 194 + (String.concat "," handles)) 178 195 not_mine_sorted 179 - end else begin 196 + end 197 + else begin 180 198 (* Compact summary *) 181 199 let grouped = Hashtbl.create 16 in 182 - List.iter (fun r -> 183 - List.iter (fun (h, _, _) -> 184 - let existing = try Hashtbl.find grouped h with Not_found -> [] in 185 - Hashtbl.replace grouped h (r.repo_name :: existing)) 186 - r.verse_sources) 200 + List.iter 201 + (fun r -> 202 + List.iter 203 + (fun (h, _, _) -> 204 + let existing = 205 + try Hashtbl.find grouped h with Not_found -> [] 206 + in 207 + Hashtbl.replace grouped h (r.repo_name :: existing)) 208 + r.verse_sources) 187 209 !not_mine; 188 - Fmt.pf ppf " %a " Fmt.(styled (`Bold) string) "Others:"; 210 + Fmt.pf ppf " %a " Fmt.(styled `Bold string) "Others:"; 189 211 let first = ref true in 190 - Hashtbl.iter (fun h repos -> 191 - if not !first then Fmt.pf ppf ", "; 192 - first := false; 193 - Fmt.(styled `Faint (fun ppf (h, n) -> 
pf ppf "%s(%d)" h n)) ppf (abbrev_handle h, List.length repos)) 212 + Hashtbl.iter 213 + (fun h repos -> 214 + if not !first then Fmt.pf ppf ", "; 215 + first := false; 216 + Fmt.(styled `Faint (fun ppf (h, n) -> pf ppf "%s(%d)" h n)) 217 + ppf 218 + (abbrev_handle h, List.length repos)) 194 219 grouped; 195 220 Fmt.pf ppf "\n" 196 221 end ··· 199 224 200 225 let pp_summary ppf t = pp_summary' ~show_all:false ppf t 201 226 202 - (** Normalize a git URL for comparison. 203 - Handles: git+https, https, git@, with/without .git suffix *) 227 + (** Normalize a git URL for comparison. Handles: git+https, https, git@, 228 + with/without .git suffix *) 204 229 let normalize_url url = 205 230 let s = Uri.to_string url in 206 231 (* Strip git+ prefix *) 207 - let s = if String.starts_with ~prefix:"git+" s then 232 + let s = 233 + if String.starts_with ~prefix:"git+" s then 208 234 String.sub s 4 (String.length s - 4) 209 235 else s 210 236 in ··· 219 245 else s 220 246 in 221 247 (* Strip .git suffix *) 222 - let s = if String.ends_with ~suffix:".git" s then 248 + let s = 249 + if String.ends_with ~suffix:".git" s then 223 250 String.sub s 0 (String.length s - 4) 224 251 else s 225 252 in 226 253 (* Strip trailing slash *) 227 - let s = if String.ends_with ~suffix:"/" s then 228 - String.sub s 0 (String.length s - 1) 254 + let s = 255 + if String.ends_with ~suffix:"/" s then String.sub s 0 (String.length s - 1) 229 256 else s 230 257 in 231 258 Uri.of_string s ··· 257 284 let versions = Eio.Path.read_dir eio_pkg in 258 285 match versions with 259 286 | [] -> None 260 - | version :: _ -> 287 + | version :: _ -> ( 261 288 let opam_path = Fpath.(pkg_dir / version / "opam") in 262 289 let eio_opam = Eio.Path.(fs / Fpath.to_string opam_path) in 263 290 try 264 291 let content = Eio.Path.load eio_opam in 265 - let opamfile = OpamParser.FullPos.string content (Fpath.to_string opam_path) in 292 + let opamfile = 293 + OpamParser.FullPos.string content (Fpath.to_string opam_path) 294 
+ in 266 295 match Opam_repo.find_dev_repo opamfile.file_contents with 267 296 | None -> None 268 297 | Some url_str -> 269 298 if Opam_repo.is_git_url url_str then 270 299 Some (pkg_name, Opam_repo.normalize_git_url url_str) 271 300 else None 272 - with _ -> None 301 + with _ -> None) 273 302 with _ -> None) 274 303 package_names 275 304 with _ -> [] ··· 277 306 (** Fetch a verse opam repo *) 278 307 let fetch_verse_opam_repo ~proc ~fs path = 279 308 let cwd = Eio.Path.(fs / Fpath.to_string path) in 280 - let cmd = ["git"; "fetch"; "--quiet"] in 281 - Log.debug (fun m -> m "Running: %s (in %a)" (String.concat " " cmd) Fpath.pp path); 309 + let cmd = [ "git"; "fetch"; "--quiet" ] in 310 + Log.debug (fun m -> 311 + m "Running: %s (in %a)" (String.concat " " cmd) Fpath.pp path); 282 312 Eio.Switch.run @@ fun sw -> 283 - let child = Eio.Process.spawn proc ~sw ~cwd 313 + let child = 314 + Eio.Process.spawn proc ~sw ~cwd 284 315 ~stdout:(Eio.Flow.buffer_sink (Buffer.create 16)) 285 316 ~stderr:(Eio.Flow.buffer_sink (Buffer.create 16)) 286 317 cmd ··· 289 320 | `Exited 0 -> () 290 321 | _ -> Log.debug (fun m -> m "Failed to fetch %a" Fpath.pp path) 291 322 292 - (** Scan all verse opam repos and build a map: repo_basename -> [(handle, url, [packages])] *) 323 + (** Scan all verse opam repos and build a map: repo_basename -> 324 + [(handle, url, [packages])] *) 293 325 let scan_all_verse_opam_repos ~proc ~fs ~verse_path () = 294 326 let eio_verse = Eio.Path.(fs / Fpath.to_string verse_path) in 295 327 let entries = try Eio.Path.read_dir eio_verse with _ -> [] in 296 328 (* Find opam repo directories (ending in -opam) *) 297 - let opam_dirs = List.filter (fun name -> String.ends_with ~suffix:"-opam" name) entries in 329 + let opam_dirs = 330 + List.filter (fun name -> String.ends_with ~suffix:"-opam" name) entries 331 + in 298 332 (* Fetch each opam repo first *) 299 333 Log.info (fun m -> m "Fetching %d verse opam repos" (List.length opam_dirs)); 300 - List.iter (fun 
opam_dir -> 301 - let opam_path = Fpath.(verse_path / opam_dir) in 302 - fetch_verse_opam_repo ~proc ~fs opam_path) 334 + List.iter 335 + (fun opam_dir -> 336 + let opam_path = Fpath.(verse_path / opam_dir) in 337 + fetch_verse_opam_repo ~proc ~fs opam_path) 303 338 opam_dirs; 304 339 (* Build map: repo_basename -> [(handle, url, [packages])] *) 305 340 let repo_map = Hashtbl.create 64 in 306 341 List.iter 307 342 (fun opam_dir -> 308 - let handle = String.sub opam_dir 0 (String.length opam_dir - 5) in (* strip -opam *) 343 + let handle = String.sub opam_dir 0 (String.length opam_dir - 5) in 344 + (* strip -opam *) 309 345 let opam_path = Fpath.(verse_path / opam_dir) in 310 346 let pkg_urls = scan_verse_opam_repo ~fs opam_path in 311 347 (* Group by repo basename *) ··· 313 349 List.iter 314 350 (fun (pkg_name, url) -> 315 351 let repo = repo_basename url in 316 - let existing = try Hashtbl.find by_repo repo with Not_found -> (url, []) in 317 - let (existing_url, pkgs) = existing in 352 + let existing = 353 + try Hashtbl.find by_repo repo with Not_found -> (url, []) 354 + in 355 + let existing_url, pkgs = existing in 318 356 Hashtbl.replace by_repo repo (existing_url, pkg_name :: pkgs)) 319 357 pkg_urls; 320 358 (* Add to main map *) 321 359 Hashtbl.iter 322 360 (fun repo (url, pkgs) -> 323 361 let source = { handle; url; packages = pkgs } in 324 - let existing = try Hashtbl.find repo_map repo with Not_found -> [] in 362 + let existing = 363 + try Hashtbl.find repo_map repo with Not_found -> [] 364 + in 325 365 Hashtbl.replace repo_map repo (source :: existing)) 326 366 by_repo) 327 367 opam_dirs; ··· 337 377 (fun pkg -> 338 378 let repo = Package.repo_name pkg in 339 379 let url = Package.dev_repo pkg in 340 - let existing = try Hashtbl.find repo_map repo with Not_found -> (url, []) in 341 - let (_, pkgs) = existing in 380 + let existing = 381 + try Hashtbl.find repo_map repo with Not_found -> (url, []) 382 + in 383 + let _, pkgs = existing in 342 384 
Hashtbl.replace repo_map repo (url, Package.name pkg :: pkgs)) 343 385 packages; 344 386 repo_map ··· 349 391 (** Check if a remote exists *) 350 392 let remote_exists ~proc ~fs ~repo remote_name = 351 393 let cwd = Eio.Path.(fs / Fpath.to_string repo) in 352 - let result = Eio.Switch.run @@ fun sw -> 394 + let result = 395 + Eio.Switch.run @@ fun sw -> 353 396 let buf = Buffer.create 256 in 354 - let child = Eio.Process.spawn proc ~sw ~cwd 355 - ~stdout:(Eio.Flow.buffer_sink buf) 397 + let child = 398 + Eio.Process.spawn proc ~sw ~cwd ~stdout:(Eio.Flow.buffer_sink buf) 356 399 ~stderr:(Eio.Flow.buffer_sink (Buffer.create 16)) 357 - ["git"; "remote"; "get-url"; remote_name] 400 + [ "git"; "remote"; "get-url"; remote_name ] 358 401 in 359 - match Eio.Process.await child with 360 - | `Exited 0 -> true 361 - | _ -> false 402 + match Eio.Process.await child with `Exited 0 -> true | _ -> false 362 403 in 363 404 result 364 405 365 406 (** Add a git remote *) 366 407 let add_remote ~proc ~fs ~repo ~name ~url () = 367 408 let cwd = Eio.Path.(fs / Fpath.to_string repo) in 368 - let cmd = ["git"; "remote"; "add"; name; Uri.to_string url] in 369 - Log.debug (fun m -> m "Running: %s (in %a)" (String.concat " " cmd) Fpath.pp repo); 409 + let cmd = [ "git"; "remote"; "add"; name; Uri.to_string url ] in 410 + Log.debug (fun m -> 411 + m "Running: %s (in %a)" (String.concat " " cmd) Fpath.pp repo); 370 412 Eio.Switch.run @@ fun sw -> 371 - let child = Eio.Process.spawn proc ~sw ~cwd 413 + let child = 414 + Eio.Process.spawn proc ~sw ~cwd 372 415 ~stdout:(Eio.Flow.buffer_sink (Buffer.create 16)) 373 416 ~stderr:(Eio.Flow.buffer_sink (Buffer.create 16)) 374 417 cmd ··· 380 423 (** Fetch a remote *) 381 424 let fetch_remote ~proc ~fs ~repo ~remote () = 382 425 let cwd = Eio.Path.(fs / Fpath.to_string repo) in 383 - let cmd = ["git"; "fetch"; remote] in 426 + let cmd = [ "git"; "fetch"; remote ] in 384 427 Log.info (fun m -> m "Fetching %s in %a" remote Fpath.pp repo); 385 - 
Log.debug (fun m -> m "Running: %s (in %a)" (String.concat " " cmd) Fpath.pp repo); 428 + Log.debug (fun m -> 429 + m "Running: %s (in %a)" (String.concat " " cmd) Fpath.pp repo); 386 430 Eio.Switch.run @@ fun sw -> 387 - let child = Eio.Process.spawn proc ~sw ~cwd 431 + let child = 432 + Eio.Process.spawn proc ~sw ~cwd 388 433 ~stdout:(Eio.Flow.buffer_sink (Buffer.create 256)) 389 434 ~stderr:(Eio.Flow.buffer_sink (Buffer.create 256)) 390 435 cmd ··· 396 441 (** Get the commit SHA for a ref *) 397 442 let get_ref_commit ~proc ~fs ~repo ref_name = 398 443 let cwd = Eio.Path.(fs / Fpath.to_string repo) in 399 - let cmd = ["git"; "rev-parse"; ref_name] in 400 - Log.debug (fun m -> m "Running: %s (in %a)" (String.concat " " cmd) Fpath.pp repo); 444 + let cmd = [ "git"; "rev-parse"; ref_name ] in 445 + Log.debug (fun m -> 446 + m "Running: %s (in %a)" (String.concat " " cmd) Fpath.pp repo); 401 447 Eio.Switch.run @@ fun sw -> 402 448 let buf = Buffer.create 64 in 403 - let child = Eio.Process.spawn proc ~sw ~cwd 404 - ~stdout:(Eio.Flow.buffer_sink buf) 449 + let child = 450 + Eio.Process.spawn proc ~sw ~cwd ~stdout:(Eio.Flow.buffer_sink buf) 405 451 ~stderr:(Eio.Flow.buffer_sink (Buffer.create 16)) 406 452 cmd 407 453 in ··· 416 462 match (my_commit, their_commit) with 417 463 | None, _ | _, None -> Not_fetched 418 464 | Some my_sha, Some their_sha when my_sha = their_sha -> Same_commit 419 - | Some my_sha, Some their_sha -> 465 + | Some my_sha, Some their_sha -> ( 420 466 (* Check ancestry *) 421 467 let cwd = Eio.Path.(fs / Fpath.to_string repo) in 422 468 let is_ancestor commit1 commit2 = 423 - let cmd = ["git"; "merge-base"; "--is-ancestor"; commit1; commit2] in 424 - Log.debug (fun m -> m "Running: %s (in %a)" (String.concat " " cmd) Fpath.pp repo); 469 + let cmd = [ "git"; "merge-base"; "--is-ancestor"; commit1; commit2 ] in 470 + Log.debug (fun m -> 471 + m "Running: %s (in %a)" (String.concat " " cmd) Fpath.pp repo); 425 472 Eio.Switch.run @@ fun sw -> 426 - 
let child = Eio.Process.spawn proc ~sw ~cwd 473 + let child = 474 + Eio.Process.spawn proc ~sw ~cwd 427 475 ~stdout:(Eio.Flow.buffer_sink (Buffer.create 16)) 428 476 ~stderr:(Eio.Flow.buffer_sink (Buffer.create 16)) 429 477 cmd 430 478 in 431 - match Eio.Process.await child with 432 - | `Exited 0 -> true 433 - | _ -> false 479 + match Eio.Process.await child with `Exited 0 -> true | _ -> false 434 480 in 435 481 let count_commits base head = 436 - let cmd = ["git"; "rev-list"; "--count"; base ^ ".." ^ head] in 437 - Log.debug (fun m -> m "Running: %s (in %a)" (String.concat " " cmd) Fpath.pp repo); 482 + let cmd = [ "git"; "rev-list"; "--count"; base ^ ".." ^ head ] in 483 + Log.debug (fun m -> 484 + m "Running: %s (in %a)" (String.concat " " cmd) Fpath.pp repo); 438 485 Eio.Switch.run @@ fun sw -> 439 486 let buf = Buffer.create 16 in 440 - let child = Eio.Process.spawn proc ~sw ~cwd 441 - ~stdout:(Eio.Flow.buffer_sink buf) 487 + let child = 488 + Eio.Process.spawn proc ~sw ~cwd ~stdout:(Eio.Flow.buffer_sink buf) 442 489 ~stderr:(Eio.Flow.buffer_sink (Buffer.create 16)) 443 490 cmd 444 491 in 445 492 match Eio.Process.await child with 446 - | `Exited 0 -> (try int_of_string (String.trim (Buffer.contents buf)) with _ -> 0) 493 + | `Exited 0 -> ( 494 + try int_of_string (String.trim (Buffer.contents buf)) with _ -> 0) 447 495 | _ -> 0 448 496 in 449 497 let my_is_ancestor = is_ancestor my_sha their_sha in 450 498 let their_is_ancestor = is_ancestor their_sha my_sha in 451 499 match (my_is_ancestor, their_is_ancestor) with 452 - | true, true -> Same_commit (* shouldn't happen if SHAs differ *) 500 + | true, true -> Same_commit (* shouldn't happen if SHAs differ *) 453 501 | true, false -> 454 502 (* My commit is ancestor of theirs -> I'm behind *) 455 503 let behind = count_commits my_sha their_sha in ··· 458 506 (* Their commit is ancestor of mine -> I'm ahead *) 459 507 let ahead = count_commits their_sha my_sha in 460 508 I_am_ahead ahead 461 - | false, false -> 
509 + | false, false -> ( 462 510 (* Check for common ancestor *) 463 - let cmd = ["git"; "merge-base"; my_sha; their_sha] in 464 - Log.debug (fun m -> m "Running: %s (in %a)" (String.concat " " cmd) Fpath.pp repo); 511 + let cmd = [ "git"; "merge-base"; my_sha; their_sha ] in 512 + Log.debug (fun m -> 513 + m "Running: %s (in %a)" (String.concat " " cmd) Fpath.pp repo); 465 514 let merge_base = 466 515 Eio.Switch.run @@ fun sw -> 467 516 let buf = Buffer.create 64 in 468 - let child = Eio.Process.spawn proc ~sw ~cwd 469 - ~stdout:(Eio.Flow.buffer_sink buf) 517 + let child = 518 + Eio.Process.spawn proc ~sw ~cwd ~stdout:(Eio.Flow.buffer_sink buf) 470 519 ~stderr:(Eio.Flow.buffer_sink (Buffer.create 16)) 471 520 cmd 472 521 in ··· 479 528 | Some base -> 480 529 let my_ahead = count_commits base my_sha in 481 530 let their_ahead = count_commits base their_sha in 482 - Diverged { common_ancestor = base; my_ahead; their_ahead } 531 + Diverged { common_ancestor = base; my_ahead; their_ahead })) 483 532 484 533 (** Compute fork analysis for all repos *) 485 534 let compute ~proc ~fs ~verse_config ~monopam_config () = ··· 530 579 match my_source with 531 580 | Some my when urls_equal my.url src.url -> Same_url 532 581 | _ when not have_checkout -> Not_fetched 533 - | _ -> 582 + | _ -> ( 534 583 let remote_name = verse_remote_name src.handle in 535 584 (* Add remote if needed *) 536 - if not (remote_exists ~proc ~fs ~repo:checkout_path remote_name) then begin 537 - Log.info (fun m -> m "Adding remote %s -> %a" remote_name Uri.pp src.url); 538 - ignore (add_remote ~proc ~fs ~repo:checkout_path ~name:remote_name ~url:src.url ()) 585 + if 586 + not 587 + (remote_exists ~proc ~fs ~repo:checkout_path 588 + remote_name) 589 + then begin 590 + Log.info (fun m -> 591 + m "Adding remote %s -> %a" remote_name Uri.pp 592 + src.url); 593 + ignore 594 + (add_remote ~proc ~fs ~repo:checkout_path 595 + ~name:remote_name ~url:src.url ()) 539 596 end; 540 597 (* Fetch remote *) 541 - 
(match fetch_remote ~proc ~fs ~repo:checkout_path ~remote:remote_name () with 598 + match 599 + fetch_remote ~proc ~fs ~repo:checkout_path 600 + ~remote:remote_name () 601 + with 542 602 | Error _ -> Not_fetched 543 603 | Ok () -> 544 604 (* Compare refs *) 545 605 let my_ref = "origin/main" in 546 606 let their_ref = remote_name ^ "/main" in 547 - compare_refs ~proc ~fs ~repo:checkout_path ~my_ref ~their_ref ()) 607 + compare_refs ~proc ~fs ~repo:checkout_path ~my_ref 608 + ~their_ref ()) 548 609 in 549 610 (src.handle, src, rel)) 550 611 verse_sources ··· 554 615 all_repos [] 555 616 in 556 617 (* Sort by repo name *) 557 - let repos = List.sort (fun a b -> String.compare a.repo_name b.repo_name) analyses in 618 + let repos = 619 + List.sort (fun a b -> String.compare a.repo_name b.repo_name) analyses 620 + in 558 621 { repos }
+29 -31
lib/forks.mli
(** Fork graph discovery via verse opam repos.

    Scans verse opam repos to discover dev-repo URLs, adds git remotes to local
    checkouts, and computes fork relationships. *)

(** {1 Types} *)

type repo_source = {
  handle : string;  (** Member handle or "me" *)
  url : Uri.t;  (** Normalized git URL *)
  packages : string list;  (** Opam packages from this repo *)
}
(** A dev-repo source from a specific member *)

(** Fork relationship between two sources *)
type relationship =
  | Same_url  (** Same git URL *)
  | Same_commit  (** Different URLs but same HEAD *)
  | I_am_ahead of int  (** They forked from me, I'm N commits ahead *)
  | I_am_behind of int  (** I forked from them, they're N commits ahead *)
  | Diverged of { common_ancestor : string; my_ahead : int; their_ahead : int }
  | Unrelated  (** No common history *)
  | Not_fetched  (** Remote not yet fetched *)

type repo_analysis = {
  repo_name : string;  (** Repository basename *)
  my_source : repo_source option;  (** My dev-repo if I have it *)
  verse_sources : (string * repo_source * relationship) list;
      (** (handle, source, relationship to me) *)
}
(** Analysis result for a single repository *)

type t = { repos : repo_analysis list }
(** Full fork analysis result *)

(** {1 Pretty Printing} *)

val pp_relationship : relationship Fmt.t
val pp_repo_source : repo_source Fmt.t
val pp_repo_analysis : repo_analysis Fmt.t

val pp : t Fmt.t
(** Verbose output with full URLs for each repo. *)

val pp_summary : t Fmt.t
(** Succinct summary: one line per repo with emphasis on repos where others have
    commits not in mine. *)

val pp_summary' : show_all:bool -> t Fmt.t
(** [pp_summary' ~show_all] formats a succinct summary. When [show_all] is true,
    lists all repos that others have but you don't. *)

val is_actionable : relationship -> bool
(** [is_actionable rel] returns [true] if the relationship indicates that others
    have commits I should consider pulling (I_am_behind or Diverged). *)

(** {1 URL Utilities} *)

val normalize_url : Uri.t -> Uri.t
(** [normalize_url url] normalizes a git URL for comparison. Converts SSH to
    HTTPS, strips git+ prefix and .git suffix. *)

val urls_equal : Uri.t -> Uri.t -> bool
(** [urls_equal url1 url2] checks if two URLs refer to the same repo.
*) ··· 77 76 unit -> 78 77 t 79 78 (** [compute ~proc ~fs ~verse_config ~monopam_config ()] performs full fork 80 - analysis by: 81 - 1. Scanning my opam repo for dev-repo URLs 82 - 2. Scanning all verse opam repos for dev-repo URLs 83 - 3. Adding git remotes to my checkouts for each member's fork 84 - 4. Fetching remotes and comparing commit histories 79 + analysis by: 1. Scanning my opam repo for dev-repo URLs 2. Scanning all 80 + verse opam repos for dev-repo URLs 3. Adding git remotes to my checkouts for 81 + each member's fork 4. Fetching remotes and comparing commit histories 85 82 86 - This is an expensive operation as it fetches from all verse member remotes. *) 83 + This is an expensive operation as it fetches from all verse member remotes. 84 + *)
+39 -40
lib/git.ml
··· 60 60 try 61 61 let result = run_git ~proc ~cwd [ "rev-parse"; "--git-dir" ] in 62 62 result.exit_code = 0 63 - with Eio.Io _ -> false (* Directory doesn't exist or not accessible *) 63 + with Eio.Io _ -> false (* Directory doesn't exist or not accessible *) 64 64 65 65 let is_dirty ~proc ~fs path = 66 66 let cwd = path_to_eio ~fs path in ··· 228 228 229 229 let add_remote ~proc ~fs ~name ~url path = 230 230 let cwd = path_to_eio ~fs path in 231 - run_git_ok ~proc ~cwd [ "remote"; "add"; name; url ] 232 - |> Result.map ignore 231 + run_git_ok ~proc ~cwd [ "remote"; "add"; name; url ] |> Result.map ignore 233 232 234 233 let remove_remote ~proc ~fs ~name path = 235 234 let cwd = path_to_eio ~fs path in 236 - run_git_ok ~proc ~cwd [ "remote"; "remove"; name ] 237 - |> Result.map ignore 235 + run_git_ok ~proc ~cwd [ "remote"; "remove"; name ] |> Result.map ignore 238 236 239 237 let set_remote_url ~proc ~fs ~name ~url path = 240 238 let cwd = path_to_eio ~fs path in 241 - run_git_ok ~proc ~cwd [ "remote"; "set-url"; name; url ] 242 - |> Result.map ignore 239 + run_git_ok ~proc ~cwd [ "remote"; "set-url"; name; url ] |> Result.map ignore 243 240 244 241 let ensure_remote ~proc ~fs ~name ~url path = 245 242 let remotes = list_remotes ~proc ~fs path in ··· 249 246 | Some existing_url when existing_url = url -> Ok () 250 247 | _ -> set_remote_url ~proc ~fs ~name ~url path 251 248 end 252 - else 253 - add_remote ~proc ~fs ~name ~url path 249 + else add_remote ~proc ~fs ~name ~url path 254 250 255 251 type log_entry = { 256 252 hash : string; ··· 296 292 let args = 297 293 match until with Some u -> args @ [ "--until=" ^ u ] | None -> args 298 294 in 299 - let args = match filter_path with Some p -> args @ [ "--"; p ] | None -> args in 295 + let args = 296 + match filter_path with Some p -> args @ [ "--"; p ] | None -> args 297 + in 300 298 match run_git_ok ~proc ~cwd args with 301 299 | Ok output -> Ok (parse_log_entries output) 302 300 | Error e -> Error e ··· 306 304 
let format_arg = "--format=%H%n%an%n%aI%n%s%n%b%x00" in 307 305 let range = Printf.sprintf "%s..%s" base tip in 308 306 let args = [ "log"; format_arg; range ] in 309 - let args = match max_count with 307 + let args = 308 + match max_count with 310 309 | Some n -> args @ [ "-n"; string_of_int n ] 311 310 | None -> args 312 311 in ··· 314 313 | Ok output -> Ok (parse_log_entries output) 315 314 | Error e -> Error e 316 315 317 - (** Parse a subtree merge/squash commit message to extract the upstream commit range. 318 - Messages look like: "Squashed 'prefix/' changes from abc123..def456" 319 - or "Squashed 'prefix/' content from commit abc123" 320 - Returns the end commit (most recent) if found. *) 316 + (** Parse a subtree merge/squash commit message to extract the upstream commit 317 + range. Messages look like: "Squashed 'prefix/' changes from abc123..def456" 318 + or "Squashed 'prefix/' content from commit abc123" Returns the end commit 319 + (most recent) if found. *) 321 320 let parse_subtree_message subject = 322 321 (* Helper to extract hex commit hash starting at position *) 323 322 let extract_hex s start = 324 323 let len = String.length s in 325 324 let rec find_end i = 326 325 if i >= len then i 327 - else match s.[i] with 328 - | '0'..'9' | 'a'..'f' -> find_end (i + 1) 329 - | _ -> i 326 + else 327 + match s.[i] with '0' .. '9' | 'a' .. 'f' -> find_end (i + 1) | _ -> i 330 328 in 331 329 let end_pos = find_end start in 332 330 if end_pos > start then Some (String.sub s start (end_pos - start)) ··· 337 335 match String.index_opt subject '.' with 338 336 | Some i when i + 1 < String.length subject && subject.[i + 1] = '.' 
-> 339 337 extract_hex subject (i + 2) 340 - | _ -> 338 + | _ -> ( 341 339 (* Pattern 2: "Squashed 'prefix/' content from commit abc123" *) 342 - (match String.split_on_char ' ' subject |> List.rev with 340 + match String.split_on_char ' ' subject |> List.rev with 343 341 | last :: "commit" :: "from" :: _ -> extract_hex last 0 344 - | _ -> None) 345 - (* Pattern 3: "Add 'prefix/' from commit abc123" *) 342 + | _ -> None) (* Pattern 3: "Add 'prefix/' from commit abc123" *) 346 343 else if String.starts_with ~prefix:"Add '" subject then 347 344 match String.split_on_char ' ' subject |> List.rev with 348 345 | last :: "commit" :: "from" :: _ -> extract_hex last 0 349 346 | _ -> None 350 - else 351 - None 347 + else None 352 348 353 - (** Find the last subtree-related commit for a given prefix. 354 - Searches git log for commits with subtree merge/squash messages. *) 349 + (** Find the last subtree-related commit for a given prefix. Searches git log 350 + for commits with subtree merge/squash messages. 
*) 355 351 let subtree_last_upstream_commit ~proc ~fs ~repo ~prefix () = 356 352 let cwd = path_to_eio ~fs repo in 357 353 (* Search for subtree-related commits - don't use path filter as it can miss merge commits *) 358 354 let grep_pattern = Printf.sprintf "^Squashed '%s/'" prefix in 359 - match run_git_ok ~proc ~cwd 360 - [ "log"; "--oneline"; "-1"; "--grep"; grep_pattern ] with 355 + match 356 + run_git_ok ~proc ~cwd [ "log"; "--oneline"; "-1"; "--grep"; grep_pattern ] 357 + with 361 358 | Error _ -> None 362 - | Ok "" -> 359 + | Ok "" -> ( 363 360 (* Try alternate pattern: Add 'prefix/' from commit *) 364 361 let add_pattern = Printf.sprintf "^Add '%s/'" prefix in 365 - (match run_git_ok ~proc ~cwd 366 - [ "log"; "--oneline"; "-1"; "--grep"; add_pattern ] with 362 + match 363 + run_git_ok ~proc ~cwd 364 + [ "log"; "--oneline"; "-1"; "--grep"; add_pattern ] 365 + with 367 366 | Error _ -> None 368 367 | Ok "" -> None 369 - | Ok line -> 368 + | Ok line -> ( 370 369 (* line is "abc1234 Add 'prefix/' from commit ..." *) 371 370 let hash = String.sub line 0 (min 7 (String.length line)) in 372 371 (* Get the full commit message to parse *) 373 372 match run_git_ok ~proc ~cwd [ "log"; "-1"; "--format=%s"; hash ] with 374 373 | Error _ -> None 375 - | Ok subject -> parse_subtree_message subject) 376 - | Ok line -> 374 + | Ok subject -> parse_subtree_message subject)) 375 + | Ok line -> ( 377 376 let hash = String.sub line 0 (min 7 (String.length line)) in 378 377 match run_git_ok ~proc ~cwd [ "log"; "-1"; "--format=%s"; hash ] with 379 378 | Error _ -> None 380 - | Ok subject -> parse_subtree_message subject 379 + | Ok subject -> parse_subtree_message subject) 381 380 382 381 (** Check if commit1 is an ancestor of commit2. 
*) 383 382 let is_ancestor ~proc ~fs ~repo ~commit1 ~commit2 () = 384 383 let cwd = path_to_eio ~fs repo in 385 - let result = run_git ~proc ~cwd 386 - [ "merge-base"; "--is-ancestor"; commit1; commit2 ] in 384 + let result = 385 + run_git ~proc ~cwd [ "merge-base"; "--is-ancestor"; commit1; commit2 ] 386 + in 387 387 result.exit_code = 0 388 388 389 389 (** Find the merge-base (common ancestor) of two commits. *) ··· 394 394 (** Count commits between two commits (exclusive of base, inclusive of head). *) 395 395 let count_commits_between ~proc ~fs ~repo ~base ~head () = 396 396 let cwd = path_to_eio ~fs repo in 397 - match run_git_ok ~proc ~cwd 398 - [ "rev-list"; "--count"; base ^ ".." ^ head ] with 397 + match run_git_ok ~proc ~cwd [ "rev-list"; "--count"; base ^ ".." ^ head ] with 399 398 | Error _ -> 0 400 - | Ok s -> try int_of_string (String.trim s) with _ -> 0 399 + | Ok s -> ( try int_of_string (String.trim s) with _ -> 0)
+9 -7
lib/git.mli
··· 276 276 ?remote:string -> 277 277 Fpath.t -> 278 278 string option 279 - (** [get_push_url ~proc ~fs ?remote path] returns the push URL for a remote, 280 - or [None] if not set or the remote doesn't exist. 279 + (** [get_push_url ~proc ~fs ?remote path] returns the push URL for a remote, or 280 + [None] if not set or the remote doesn't exist. 281 281 282 282 @param remote Remote name (default: "origin") *) 283 283 ··· 322 322 url:string -> 323 323 Fpath.t -> 324 324 (unit, error) result 325 - (** [set_remote_url ~proc ~fs ~name ~url path] updates the URL for an existing remote. *) 325 + (** [set_remote_url ~proc ~fs ~name ~url path] updates the URL for an existing 326 + remote. *) 326 327 327 328 val ensure_remote : 328 329 proc:_ Eio.Process.mgr -> ··· 331 332 url:string -> 332 333 Fpath.t -> 333 334 (unit, error) result 334 - (** [ensure_remote ~proc ~fs ~name ~url path] ensures a remote exists with the given URL. 335 - If the remote exists with a different URL, it is updated. 336 - If the remote doesn't exist, it is added. *) 335 + (** [ensure_remote ~proc ~fs ~name ~url path] ensures a remote exists with the 336 + given URL. If the remote exists with a different URL, it is updated. If the 337 + remote doesn't exist, it is added. *) 337 338 338 339 (** {1 Commit History} *) 339 340 ··· 369 370 ?max_count:int -> 370 371 Fpath.t -> 371 372 (log_entry list, error) result 372 - (** [log_range ~proc ~fs ~base ~tip ?max_count repo] retrieves commits between refs. 373 + (** [log_range ~proc ~fs ~base ~tip ?max_count repo] retrieves commits between 374 + refs. 373 375 374 376 Gets commits reachable from [tip] but not from [base] (i.e., [base..tip]). 375 377
+543 -296
lib/monopam.ml
··· 34 34 | Package_not_found name -> Fmt.pf ppf "Package not found: %s" name 35 35 | Claude_error msg -> Fmt.pf ppf "Claude error: %s" msg 36 36 37 - (** Returns a hint string for the given error, or None if no hint is available. *) 37 + (** Returns a hint string for the given error, or None if no hint is available. 38 + *) 38 39 let error_hint = function 39 40 | Config_error _ -> 40 - Some "Run 'monopam verse init --handle <your-handle>' to create a workspace." 41 + Some 42 + "Run 'monopam verse init --handle <your-handle>' to create a workspace." 41 43 | Repo_error (Opam_repo.No_dev_repo _) -> 42 - Some "Add a 'dev-repo' field to the package's opam file pointing to a git URL." 44 + Some 45 + "Add a 'dev-repo' field to the package's opam file pointing to a git \ 46 + URL." 43 47 | Repo_error (Opam_repo.Not_git_remote _) -> 44 48 Some "The dev-repo must be a git URL (git+https:// or git://)." 45 49 | Repo_error _ -> None ··· 53 57 Some "Check that the remote is configured: git remote -v" 54 58 | Git_error (Git.Branch_not_found _) -> 55 59 Some "Check available branches: git branch -a" 56 - | Git_error (Git.Command_failed (cmd, _)) when String.starts_with ~prefix:"git push" cmd -> 60 + | Git_error (Git.Command_failed (cmd, _)) 61 + when String.starts_with ~prefix:"git push" cmd -> 57 62 Some "Check your network connection and git credentials." 58 - | Git_error (Git.Command_failed (cmd, _)) when String.starts_with ~prefix:"git subtree" cmd -> 63 + | Git_error (Git.Command_failed (cmd, _)) 64 + when String.starts_with ~prefix:"git subtree" cmd -> 59 65 Some "Run 'monopam status' to check repository state." 
60 66 | Git_error _ -> None 61 67 | Dirty_state _ -> 62 - Some "Commit changes in the monorepo first: cd mono && git add -A && git commit" 68 + Some 69 + "Commit changes in the monorepo first: cd mono && git add -A && git \ 70 + commit" 63 71 | Package_not_found _ -> 64 72 Some "Check available packages: ls opam-repo/packages/" 65 73 | Claude_error msg when String.starts_with ~prefix:"Failed to decode" msg -> ··· 131 139 (fun pkg -> 132 140 let repo = Package.repo_name pkg in 133 141 let name = Package.name pkg in 134 - let existing = try Hashtbl.find registered_by_repo repo with Not_found -> [] in 142 + let existing = 143 + try Hashtbl.find registered_by_repo repo with Not_found -> [] 144 + in 135 145 Hashtbl.replace registered_by_repo repo (name :: existing)) 136 146 pkgs; 137 147 (* Get unique subtree directories *) ··· 153 163 let repo = Package.repo_name pkg in 154 164 let subtree_dir = Fpath.(monorepo / Package.subtree_prefix pkg) in 155 165 let eio_path = Eio.Path.(fs / Fpath.to_string subtree_dir) in 156 - let registered = try Hashtbl.find registered_by_repo repo with Not_found -> [] in 166 + let registered = 167 + try Hashtbl.find registered_by_repo repo with Not_found -> [] 168 + in 157 169 try 158 170 Eio.Path.read_dir eio_path 159 171 |> List.filter_map (fun name -> ··· 240 252 else dev_repo 241 253 in 242 254 let repo_cell = 243 - if i = 0 then Printf.sprintf "[**%s**](%s)" repo display_url 244 - else "" 255 + if i = 0 then Printf.sprintf "[**%s**](%s)" repo display_url else "" 245 256 in 246 257 let synopsis = Option.value ~default:"" (Package.synopsis pkg) in 247 258 Buffer.add_string buf 248 - (Printf.sprintf "| %s | %s | %s |\n" repo_cell 249 - (Package.name pkg) synopsis)) 259 + (Printf.sprintf "| %s | %s | %s |\n" repo_cell (Package.name pkg) 260 + synopsis)) 250 261 pkgs) 251 262 grouped; 252 263 Buffer.add_string buf "\n---\n\n"; ··· 365 376 (** Collect all external dependencies by scanning monorepo subtree directories. 
366 377 This scans all .opam files in each subtree directory to find dependencies, 367 378 ensuring we get dependencies from all packages in a directory, not just 368 - those registered in the opam overlay. 369 - Returns a sorted, deduplicated list of package names that are dependencies 370 - but not packages in the repo itself. *) 379 + those registered in the opam overlay. Returns a sorted, deduplicated list of 380 + package names that are dependencies but not packages in the repo itself. *) 371 381 let collect_external_deps ~fs ~config pkgs = 372 382 let monorepo = Config.Paths.monorepo config in 373 383 (* Get unique repos to avoid scanning the same directory multiple times *) ··· 411 421 (* Filter out packages that are in the repo *) 412 422 List.filter (fun dep -> not (List.mem dep pkg_names)) all_deps 413 423 414 - (** Generate dune-project content for the monorepo root. 415 - Lists all external dependencies as a virtual package. *) 424 + (** Generate dune-project content for the monorepo root. Lists all external 425 + dependencies as a virtual package. 
*) 416 426 let generate_dune_project ~fs ~config pkgs = 417 427 let external_deps = collect_external_deps ~fs ~config pkgs in 418 428 let buf = Buffer.create 1024 in ··· 458 468 Eio.Switch.run (fun sw -> 459 469 let child = 460 470 Eio.Process.spawn proc ~sw ~cwd:monorepo_eio 461 - [ "git"; "commit"; "-m"; "Update dune-project with external dependencies" ] 471 + [ 472 + "git"; 473 + "commit"; 474 + "-m"; 475 + "Update dune-project with external dependencies"; 476 + ] 462 477 in 463 478 ignore (Eio.Process.await child)); 464 479 Log.app (fun m -> ··· 623 638 match (scheme, host) with 624 639 | Some ("https" | "http"), Some "github.com" -> 625 640 (* https://github.com/user/repo.git -> git@github.com:user/repo.git *) 626 - let path = if String.length path > 0 && path.[0] = '/' then 627 - String.sub path 1 (String.length path - 1) 628 - else path in 641 + let path = 642 + if String.length path > 0 && path.[0] = '/' then 643 + String.sub path 1 (String.length path - 1) 644 + else path 645 + in 629 646 Printf.sprintf "git@github.com:%s" path 630 647 | Some ("https" | "http"), Some "tangled.org" -> 631 648 (* https://tangled.org/@anil.recoil.org/foo -> git@git.recoil.org:anil.recoil.org/foo *) 632 - let path = if String.length path > 0 && path.[0] = '/' then 633 - String.sub path 1 (String.length path - 1) 634 - else path in 649 + let path = 650 + if String.length path > 0 && path.[0] = '/' then 651 + String.sub path 1 (String.length path - 1) 652 + else path 653 + in 635 654 (* Strip leading @ from username if present *) 636 - let path = if String.length path > 0 && path.[0] = '@' then 637 - String.sub path 1 (String.length path - 1) 638 - else path in 655 + let path = 656 + if String.length path > 0 && path.[0] = '@' then 657 + String.sub path 1 (String.length path - 1) 658 + else path 659 + in 639 660 (* Strip .git suffix if present *) 640 - let path = if String.ends_with ~suffix:".git" path then 641 - String.sub path 0 (String.length path - 4) 642 - else path in 661 
+ let path = 662 + if String.ends_with ~suffix:".git" path then 663 + String.sub path 0 (String.length path - 4) 664 + else path 665 + in 643 666 Printf.sprintf "git@git.recoil.org:%s" path 644 667 | _ -> 645 668 (* Return original URL for other cases *) ··· 742 765 else begin 743 766 (* Opam repo doesn't exist - clone it if we have a URL *) 744 767 match opam_repo_url with 745 - | Some url -> 746 - Log.info (fun m -> m "Cloning opam repo from %s to %a" url Fpath.pp opam_repo); 768 + | Some url -> ( 769 + Log.info (fun m -> 770 + m "Cloning opam repo from %s to %a" url Fpath.pp opam_repo); 747 771 let url = Uri.of_string url in 748 772 let branch = Config.default_branch config in 749 - (match Git.clone ~proc ~fs:fs_t ~url ~branch opam_repo with 773 + match Git.clone ~proc ~fs:fs_t ~url ~branch opam_repo with 750 774 | Ok () -> Log.info (fun m -> m "Opam repo cloned successfully") 751 - | Error e -> Log.warn (fun m -> m "Failed to clone opam repo: %a" Git.pp_error e)) 775 + | Error e -> 776 + Log.warn (fun m -> m "Failed to clone opam repo: %a" Git.pp_error e) 777 + ) 752 778 | None -> 753 - Log.info (fun m -> m "Opam repo at %a does not exist and no URL provided" Fpath.pp opam_repo) 779 + Log.info (fun m -> 780 + m "Opam repo at %a does not exist and no URL provided" Fpath.pp 781 + opam_repo) 754 782 end; 755 783 (* Ensure directories exist before computing status *) 756 784 ensure_checkouts_dir ~fs:fs_t ~config; ··· 940 968 in 941 969 let* () = 942 970 if needs_clone then begin 943 - Log.info (fun m -> 944 - m "Creating checkout for %s" (Package.repo_name pkg)); 971 + Log.info (fun m -> m "Creating checkout for %s" (Package.repo_name pkg)); 945 972 ensure_checkout ~proc ~fs:(fs :> _ Eio.Path.t) ~config pkg 946 973 end 947 974 else Ok () ··· 952 979 Log.info (fun m -> m "Pushing subtree %s to checkout" prefix); 953 980 let* _ = 954 981 run_git_in ~proc ~cwd:monorepo_eio 955 - [ 956 - "subtree"; "push"; "--prefix"; prefix; checkout_path; sync_branch; 957 - ] 982 + [ 
"subtree"; "push"; "--prefix"; prefix; checkout_path; sync_branch ] 958 983 in 959 984 (* Merge sync branch into the target branch in checkout *) 960 985 Log.debug (fun m -> m "Merging %s into %s" sync_branch branch); 961 986 let* _ = 962 - run_git_in ~proc ~cwd:checkout_eio 963 - [ "merge"; "--ff-only"; sync_branch ] 987 + run_git_in ~proc ~cwd:checkout_eio [ "merge"; "--ff-only"; sync_branch ] 964 988 in 965 989 (* Delete the sync branch *) 966 990 Log.debug (fun m -> m "Cleaning up %s branch" sync_branch); 967 - ignore 968 - (run_git_in ~proc ~cwd:checkout_eio [ "branch"; "-d"; sync_branch ]); 991 + ignore (run_git_in ~proc ~cwd:checkout_eio [ "branch"; "-d"; sync_branch ]); 969 992 Ok () 970 993 end 971 994 ··· 1027 1050 m "[%d/%d] Pushing %s to %s" i total 1028 1051 (Package.repo_name pkg) push_url); 1029 1052 (* Set the push URL for origin *) 1030 - (match Git.set_push_url ~proc ~fs:fs_t ~url:push_url checkout_dir with 1031 - | Ok () -> () 1032 - | Error e -> 1033 - Log.warn (fun m -> 1034 - m "Failed to set push URL: %a" Git.pp_error e)); 1053 + (match 1054 + Git.set_push_url ~proc ~fs:fs_t ~url:push_url 1055 + checkout_dir 1056 + with 1057 + | Ok () -> () 1058 + | Error e -> 1059 + Log.warn (fun m -> 1060 + m "Failed to set push URL: %a" Git.pp_error e)); 1035 1061 match 1036 1062 Git.push_remote ~proc ~fs:fs_t ~branch checkout_dir 1037 1063 with ··· 1073 1099 | `Push_remote -> Fmt.string ppf "push-remote" 1074 1100 1075 1101 let pp_sync_failure ppf f = 1076 - Fmt.pf ppf "%s (%a): %a" f.repo_name pp_sync_phase f.phase Git.pp_error f.error 1102 + Fmt.pf ppf "%s (%a): %a" f.repo_name pp_sync_phase f.phase Git.pp_error 1103 + f.error 1077 1104 1078 1105 let pp_sync_summary ppf s = 1079 1106 Fmt.pf ppf "Synced: %d, Unchanged: %d, Pulled: %d commits, Pushed: %d commits" 1080 1107 s.repos_synced s.repos_unchanged s.commits_pulled s.commits_pushed; 1081 1108 if s.errors <> [] then 1082 - Fmt.pf ppf "@.Errors (%d):@. 
@[<v>%a@]" 1083 - (List.length s.errors) 1084 - Fmt.(list ~sep:cut pp_sync_failure) s.errors 1109 + Fmt.pf ppf "@.Errors (%d):@. @[<v>%a@]" (List.length s.errors) 1110 + Fmt.(list ~sep:cut pp_sync_failure) 1111 + s.errors 1085 1112 1086 1113 (* Helper to ensure checkout exists, returning whether it was cloned *) 1087 1114 let ensure_checkout_safe ~proc ~fs ~config pkg = ··· 1100 1127 Log.info (fun m -> 1101 1128 m "Cloning %s from %a (branch: %s)" (Package.repo_name pkg) Uri.pp 1102 1129 (Package.dev_repo pkg) branch); 1103 - match Git.clone ~proc ~fs ~url:(Package.dev_repo pkg) ~branch checkout_dir with 1130 + match 1131 + Git.clone ~proc ~fs ~url:(Package.dev_repo pkg) ~branch checkout_dir 1132 + with 1104 1133 | Ok () -> Ok (true, 0) 1105 1134 | Error e -> Error e 1106 1135 end ··· 1146 1175 Log.info (fun m -> m "Pushing %s to %s" (Package.repo_name pkg) push_url); 1147 1176 (* Set the push URL for origin *) 1148 1177 (match Git.set_push_url ~proc ~fs ~url:push_url checkout_dir with 1149 - | Ok () -> () 1150 - | Error e -> 1151 - Log.warn (fun m -> m "Failed to set push URL: %a" Git.pp_error e)); 1178 + | Ok () -> () 1179 + | Error e -> Log.warn (fun m -> m "Failed to set push URL: %a" Git.pp_error e)); 1152 1180 Git.push_remote ~proc ~fs ~branch checkout_dir 1153 1181 1154 1182 (* Sanitize handle for use as git remote name *) 1155 1183 let sanitize_remote_name handle = 1156 1184 (* Replace @ and . with - for valid git remote names *) 1157 - String.map (function 1158 - | '@' | '.' -> '-' 1159 - | c -> c) handle 1185 + String.map (function '@' | '.' 
-> '-' | c -> c) handle 1160 1186 1161 1187 (* Ensure verse remotes for a single repo *) 1162 1188 let ensure_verse_remotes_for_repo ~proc ~fs ~config ~verse_subtrees pkg = ··· 1169 1195 else begin 1170 1196 (* Get all verse members who have this repo *) 1171 1197 let members_with_repo = 1172 - Hashtbl.find_opt verse_subtrees repo_name 1173 - |> Option.value ~default:[] 1198 + Hashtbl.find_opt verse_subtrees repo_name |> Option.value ~default:[] 1174 1199 in 1175 1200 1176 1201 (* Get current remotes *) 1177 1202 let current_remotes = Git.list_remotes ~proc ~fs checkout_dir in 1178 1203 let verse_remotes = 1179 - List.filter (fun r -> String.starts_with ~prefix:"verse-" r) current_remotes 1204 + List.filter 1205 + (fun r -> String.starts_with ~prefix:"verse-" r) 1206 + current_remotes 1180 1207 in 1181 1208 1182 1209 (* Build set of expected verse remotes *) 1183 1210 let expected_remotes = 1184 - List.map (fun (handle, _) -> "verse-" ^ sanitize_remote_name handle) members_with_repo 1211 + List.map 1212 + (fun (handle, _) -> "verse-" ^ sanitize_remote_name handle) 1213 + members_with_repo 1185 1214 in 1186 1215 1187 1216 (* Add/update remotes for verse members *) 1188 - List.iter (fun (handle, verse_mono_path) -> 1217 + List.iter 1218 + (fun (handle, verse_mono_path) -> 1189 1219 let remote_name = "verse-" ^ sanitize_remote_name handle in 1190 1220 (* Point to their src/ checkout for this repo *) 1191 1221 let verse_src = Fpath.(parent verse_mono_path / "src" / repo_name) in 1192 1222 if Sys.file_exists (Fpath.to_string verse_src) then begin 1193 1223 let url = Fpath.to_string verse_src in 1194 - match Git.ensure_remote ~proc ~fs ~name:remote_name ~url checkout_dir with 1195 - | Ok () -> Log.debug (fun m -> m "Ensured verse remote %s -> %s" remote_name url) 1196 - | Error e -> Log.warn (fun m -> m "Failed to add verse remote %s: %a" remote_name Git.pp_error e) 1224 + match 1225 + Git.ensure_remote ~proc ~fs ~name:remote_name ~url checkout_dir 1226 + with 1227 + | 
Ok () -> 1228 + Log.debug (fun m -> 1229 + m "Ensured verse remote %s -> %s" remote_name url) 1230 + | Error e -> 1231 + Log.warn (fun m -> 1232 + m "Failed to add verse remote %s: %a" remote_name Git.pp_error 1233 + e) 1197 1234 end) 1198 1235 members_with_repo; 1199 1236 1200 1237 (* Remove outdated verse remotes *) 1201 - List.iter (fun remote_name -> 1238 + List.iter 1239 + (fun remote_name -> 1202 1240 if not (List.mem remote_name expected_remotes) then begin 1203 1241 Log.debug (fun m -> m "Removing outdated verse remote %s" remote_name); 1204 1242 match Git.remove_remote ~proc ~fs ~name:remote_name checkout_dir with 1205 1243 | Ok () -> () 1206 - | Error e -> Log.warn (fun m -> m "Failed to remove verse remote %s: %a" remote_name Git.pp_error e) 1244 + | Error e -> 1245 + Log.warn (fun m -> 1246 + m "Failed to remove verse remote %s: %a" remote_name 1247 + Git.pp_error e) 1207 1248 end) 1208 1249 verse_remotes 1209 1250 end ··· 1211 1252 (* Sync verse remotes for all repos *) 1212 1253 let sync_verse_remotes ~proc ~fs ~config ~verse_config repos = 1213 1254 Log.app (fun m -> m " Updating verse remotes..."); 1214 - let verse_subtrees = Verse.get_verse_subtrees ~proc ~fs ~config:verse_config () in 1215 - List.iter (fun pkg -> 1255 + let verse_subtrees = 1256 + Verse.get_verse_subtrees ~proc ~fs ~config:verse_config () 1257 + in 1258 + List.iter 1259 + (fun pkg -> 1216 1260 ensure_verse_remotes_for_repo ~proc ~fs ~config ~verse_subtrees pkg) 1217 1261 repos 1218 1262 ··· 1221 1265 let checkouts_root = Config.Paths.checkouts config in 1222 1266 let checkout_dir = Package.checkout_dir ~checkouts_root pkg in 1223 1267 let remotes = Git.list_remotes ~proc ~fs checkout_dir in 1224 - let verse_remotes = List.filter (fun r -> String.starts_with ~prefix:"verse-" r) remotes in 1225 - List.iter (fun remote -> 1268 + let verse_remotes = 1269 + List.filter (fun r -> String.starts_with ~prefix:"verse-" r) remotes 1270 + in 1271 + List.iter 1272 + (fun remote -> 1226 1273 
Log.debug (fun m -> m "Fetching from verse remote %s" remote); 1227 1274 match Git.fetch ~proc ~fs ~remote checkout_dir with 1228 1275 | Ok () -> () 1229 - | Error e -> Log.debug (fun m -> m "Failed to fetch from %s: %a" remote Git.pp_error e)) 1276 + | Error e -> 1277 + Log.debug (fun m -> 1278 + m "Failed to fetch from %s: %a" remote Git.pp_error e)) 1230 1279 verse_remotes 1231 1280 1232 - let sync ~proc ~fs ~config ?package ?(remote = false) ?(skip_push = false) ?(skip_pull = false) () = 1281 + let sync ~proc ~fs ~config ?package ?(remote = false) ?(skip_push = false) 1282 + ?(skip_pull = false) () = 1233 1283 let fs_t = fs_typed fs in 1234 1284 (* Update the opam repo first - clone if needed *) 1235 1285 let opam_repo = Config.Paths.opam_repo config in ··· 1279 1329 (* git subtree push is read-only on the monorepo, so safe to parallelize *) 1280 1330 let push_results = 1281 1331 if skip_push then begin 1282 - Log.app (fun m -> m " Skipping push to checkouts (--skip-push)"); 1332 + Log.app (fun m -> 1333 + m " Skipping push to checkouts (--skip-push)"); 1283 1334 List.map (fun pkg -> Ok (Package.repo_name pkg)) repos 1284 1335 end 1285 1336 else begin 1286 - Log.app (fun m -> m " Pushing monorepo changes to checkouts (parallel)..."); 1287 - Eio.Fiber.List.map ~max_fibers:4 (fun pkg -> 1288 - let repo_name = Package.repo_name pkg in 1289 - Log.info (fun m -> m "Push to checkout: %s" repo_name); 1290 - match push_one ~proc ~fs ~config pkg with 1291 - | Ok () -> Ok repo_name 1292 - | Error (Git_error e) -> 1293 - Error { repo_name; phase = `Push_checkout; error = e } 1294 - | Error _ -> Ok repo_name) 1337 + Log.app (fun m -> 1338 + m " Pushing monorepo changes to checkouts (parallel)..."); 1339 + Eio.Fiber.List.map ~max_fibers:4 1340 + (fun pkg -> 1341 + let repo_name = Package.repo_name pkg in 1342 + Log.info (fun m -> m "Push to checkout: %s" repo_name); 1343 + match push_one ~proc ~fs ~config pkg with 1344 + | Ok () -> Ok repo_name 1345 + | Error (Git_error e) 
-> 1346 + Error { repo_name; phase = `Push_checkout; error = e } 1347 + | Error _ -> Ok repo_name) 1295 1348 repos 1296 1349 end 1297 1350 in 1298 1351 let push_errors = 1299 - List.filter_map (function Error e -> Some e | Ok _ -> None) push_results 1352 + List.filter_map 1353 + (function Error e -> Some e | Ok _ -> None) 1354 + push_results 1300 1355 in 1301 1356 1302 1357 (* Steps 3-5: Pull phases (fetch, merge, subtree) - skip if --skip-pull *) 1303 - let fetch_errors, unchanged_count, total_commits_pulled, merge_errors, subtree_errors = 1358 + let ( fetch_errors, 1359 + unchanged_count, 1360 + total_commits_pulled, 1361 + merge_errors, 1362 + subtree_errors ) = 1304 1363 if skip_pull then begin 1305 - Log.app (fun m -> m " Skipping pull from remotes (--skip-pull)"); 1364 + Log.app (fun m -> 1365 + m " Skipping pull from remotes (--skip-pull)"); 1306 1366 ([], List.length repos, 0, ref [], ref []) 1307 1367 end 1308 1368 else begin 1309 1369 (* Step 3: Fetch phase - clone/fetch from remotes (PARALLEL) *) 1310 1370 Log.app (fun m -> m " Fetching from remotes (parallel)..."); 1311 - let fetch_results = Eio.Fiber.List.map ~max_fibers:4 (fun pkg -> 1312 - let repo_name = Package.repo_name pkg in 1313 - (* First ensure checkout exists *) 1314 - match ensure_checkout_safe ~proc ~fs:fs_t ~config pkg with 1315 - | Error e -> Error { repo_name; phase = `Fetch; error = e } 1316 - | Ok (was_cloned, _) -> 1317 - if was_cloned then Ok (repo_name, true, 0) 1318 - else 1319 - match fetch_checkout_safe ~proc ~fs:fs_t ~config pkg with 1320 - | Error e -> Error { repo_name; phase = `Fetch; error = e } 1321 - | Ok commits -> Ok (repo_name, false, commits)) 1322 - repos 1371 + let fetch_results = 1372 + Eio.Fiber.List.map ~max_fibers:4 1373 + (fun pkg -> 1374 + let repo_name = Package.repo_name pkg in 1375 + (* First ensure checkout exists *) 1376 + match 1377 + ensure_checkout_safe ~proc ~fs:fs_t ~config pkg 1378 + with 1379 + | Error e -> 1380 + Error { repo_name; phase = `Fetch; 
error = e } 1381 + | Ok (was_cloned, _) -> ( 1382 + if was_cloned then Ok (repo_name, true, 0) 1383 + else 1384 + match 1385 + fetch_checkout_safe ~proc ~fs:fs_t ~config pkg 1386 + with 1387 + | Error e -> 1388 + Error { repo_name; phase = `Fetch; error = e } 1389 + | Ok commits -> Ok (repo_name, false, commits))) 1390 + repos 1323 1391 in 1324 1392 let fetch_errs, fetch_successes = 1325 - List.partition_map (function 1326 - | Error e -> Left e 1327 - | Ok r -> Right r) 1393 + List.partition_map 1394 + (function Error e -> Left e | Ok r -> Right r) 1328 1395 fetch_results 1329 1396 in 1330 - let cloned = List.filter (fun (_, c, _) -> c) fetch_successes in 1331 - let updated = List.filter (fun (_, c, commits) -> not c && commits > 0) fetch_successes in 1332 - let unchanged = List.length fetch_successes - List.length cloned - List.length updated in 1333 - let commits_pulled = List.fold_left (fun acc (_, _, c) -> acc + c) 0 fetch_successes in 1334 - Log.app (fun m -> m " Pulled: %d cloned, %d updated, %d unchanged" 1335 - (List.length cloned) (List.length updated) unchanged); 1397 + let cloned = 1398 + List.filter (fun (_, c, _) -> c) fetch_successes 1399 + in 1400 + let updated = 1401 + List.filter 1402 + (fun (_, c, commits) -> (not c) && commits > 0) 1403 + fetch_successes 1404 + in 1405 + let unchanged = 1406 + List.length fetch_successes 1407 + - List.length cloned - List.length updated 1408 + in 1409 + let commits_pulled = 1410 + List.fold_left 1411 + (fun acc (_, _, c) -> acc + c) 1412 + 0 fetch_successes 1413 + in 1414 + Log.app (fun m -> 1415 + m " Pulled: %d cloned, %d updated, %d unchanged" 1416 + (List.length cloned) (List.length updated) unchanged); 1336 1417 1337 1418 (* Step 4: Merge phase - fast-forward merge checkouts (SEQUENTIAL) *) 1338 1419 Log.app (fun m -> m " Merging checkouts..."); 1339 1420 let merge_errs = ref [] in 1340 - List.iter (fun pkg -> 1421 + List.iter 1422 + (fun pkg -> 1341 1423 match merge_checkout_safe ~proc ~fs:fs_t ~config pkg 
with 1342 1424 | Ok () -> () 1343 1425 | Error e -> 1344 - merge_errs := { repo_name = Package.repo_name pkg; 1345 - phase = `Merge; error = e } :: !merge_errs) 1426 + merge_errs := 1427 + { 1428 + repo_name = Package.repo_name pkg; 1429 + phase = `Merge; 1430 + error = e; 1431 + } 1432 + :: !merge_errs) 1346 1433 repos; 1347 1434 1348 1435 (* Step 5: Subtree phase - pull subtrees into monorepo (SEQUENTIAL) *) ··· 1352 1439 let subtree_errs = ref [] in 1353 1440 if monorepo_dirty then begin 1354 1441 Log.warn (fun m -> 1355 - m "Monorepo has uncommitted changes, skipping subtree pulls"); 1356 - Log.app (fun m -> m " Skipping subtree updates (local modifications)...") 1442 + m 1443 + "Monorepo has uncommitted changes, skipping subtree \ 1444 + pulls"); 1445 + Log.app (fun m -> 1446 + m " Skipping subtree updates (local modifications)...") 1357 1447 end 1358 1448 else begin 1359 1449 Log.app (fun m -> m " Updating subtrees..."); 1360 - List.iteri (fun i pkg -> 1450 + List.iteri 1451 + (fun i pkg -> 1361 1452 Log.info (fun m -> 1362 1453 m "[%d/%d] Subtree %s" (i + 1) total 1363 1454 (Package.subtree_prefix pkg)); 1364 1455 match pull_subtree ~proc ~fs ~config pkg with 1365 1456 | Ok _ -> () 1366 1457 | Error (Git_error e) -> 1367 - subtree_errs := { repo_name = Package.repo_name pkg; 1368 - phase = `Subtree; error = e } :: !subtree_errs 1458 + subtree_errs := 1459 + { 1460 + repo_name = Package.repo_name pkg; 1461 + phase = `Subtree; 1462 + error = e; 1463 + } 1464 + :: !subtree_errs 1369 1465 | Error _ -> ()) 1370 1466 repos 1371 1467 end; 1372 - (fetch_errs, unchanged, commits_pulled, merge_errs, subtree_errs) 1468 + ( fetch_errs, 1469 + unchanged, 1470 + commits_pulled, 1471 + merge_errs, 1472 + subtree_errs ) 1373 1473 end 1374 1474 in 1375 1475 1376 1476 (* Step 5.5: Verse remotes - update and fetch from verse members *) 1377 1477 (match Verse_config.load ~fs:(fs_t :> _ Eio.Path.t) () with 1378 - | Error _ -> () (* No verse config, skip verse remotes *) 1379 - | 
Ok verse_config -> 1380 - sync_verse_remotes ~proc ~fs:fs_t ~config ~verse_config repos; 1381 - (* Fetch from verse remotes in parallel *) 1382 - Log.app (fun m -> m " Fetching from verse remotes..."); 1383 - Eio.Fiber.List.iter ~max_fibers:4 (fun pkg -> 1384 - fetch_verse_remotes ~proc ~fs:fs_t ~config pkg) 1385 - repos); 1478 + | Error _ -> () (* No verse config, skip verse remotes *) 1479 + | Ok verse_config -> 1480 + sync_verse_remotes ~proc ~fs:fs_t ~config ~verse_config repos; 1481 + (* Fetch from verse remotes in parallel *) 1482 + Log.app (fun m -> m " Fetching from verse remotes..."); 1483 + Eio.Fiber.List.iter ~max_fibers:4 1484 + (fun pkg -> fetch_verse_remotes ~proc ~fs:fs_t ~config pkg) 1485 + repos); 1386 1486 1387 1487 (* Step 6: Finalize - write README.md, CLAUDE.md, and dune-project (SEQUENTIAL) *) 1388 - Log.app (fun m -> m " Writing README.md, CLAUDE.md, and dune-project..."); 1488 + Log.app (fun m -> 1489 + m " Writing README.md, CLAUDE.md, and dune-project..."); 1389 1490 write_readme ~proc ~fs:fs_t ~config all_pkgs; 1390 1491 write_claude_md ~proc ~fs:fs_t ~config; 1391 1492 write_dune_project ~proc ~fs:fs_t ~config all_pkgs; ··· 1395 1496 if remote then begin 1396 1497 Log.app (fun m -> m " Pushing to upstream remotes..."); 1397 1498 (* Limit to 2 concurrent pushes to avoid overwhelming remotes *) 1398 - let push_results = Eio.Fiber.List.map ~max_fibers:2 (fun pkg -> 1399 - let repo_name = Package.repo_name pkg in 1400 - match push_remote_safe ~proc ~fs:fs_t ~config pkg with 1401 - | Error e -> Error { repo_name; phase = `Push_remote; error = e } 1402 - | Ok () -> 1403 - Log.app (fun m -> m " Pushed %s" repo_name); 1404 - Ok repo_name) 1405 - repos 1499 + let push_results = 1500 + Eio.Fiber.List.map ~max_fibers:2 1501 + (fun pkg -> 1502 + let repo_name = Package.repo_name pkg in 1503 + match push_remote_safe ~proc ~fs:fs_t ~config pkg with 1504 + | Error e -> 1505 + Error { repo_name; phase = `Push_remote; error = e } 1506 + | Ok () -> 1507 + 
Log.app (fun m -> m " Pushed %s" repo_name); 1508 + Ok repo_name) 1509 + repos 1406 1510 in 1407 1511 let errors, successes = 1408 - List.partition_map (function 1409 - | Error e -> Left e 1410 - | Ok r -> Right r) 1512 + List.partition_map 1513 + (function Error e -> Left e | Ok r -> Right r) 1411 1514 push_results 1412 1515 in 1413 - Log.app (fun m -> m " Pushed: %d repos to upstream" (List.length successes)); 1516 + Log.app (fun m -> 1517 + m " Pushed: %d repos to upstream" (List.length successes)); 1414 1518 errors 1415 1519 end 1416 1520 else [] ··· 1418 1522 1419 1523 (* Collect all errors *) 1420 1524 let all_errors = 1421 - push_errors @ fetch_errors @ !merge_errors @ !subtree_errors @ remote_errors 1525 + push_errors @ fetch_errors @ !merge_errors @ !subtree_errors 1526 + @ remote_errors 1422 1527 in 1423 - let summary = { 1424 - repos_synced = List.length repos - List.length all_errors; 1425 - repos_unchanged = unchanged_count; 1426 - commits_pulled = total_commits_pulled; 1427 - commits_pushed = 0; (* TODO: track this *) 1428 - errors = all_errors; 1429 - } in 1528 + let summary = 1529 + { 1530 + repos_synced = List.length repos - List.length all_errors; 1531 + repos_unchanged = unchanged_count; 1532 + commits_pulled = total_commits_pulled; 1533 + commits_pushed = 0; 1534 + (* TODO: track this *) 1535 + errors = all_errors; 1536 + } 1537 + in 1430 1538 1431 1539 (* Print summary *) 1432 - Log.app (fun m -> m "@.Summary: %d synced, %d errors" 1433 - summary.repos_synced (List.length summary.errors)); 1540 + Log.app (fun m -> 1541 + m "@.Summary: %d synced, %d errors" summary.repos_synced 1542 + (List.length summary.errors)); 1434 1543 if summary.errors <> [] then 1435 - List.iter (fun e -> 1436 - Log.warn (fun m -> m " %a" pp_sync_failure e)) 1544 + List.iter 1545 + (fun e -> Log.warn (fun m -> m " %a" pp_sync_failure e)) 1437 1546 summary.errors; 1438 1547 1439 1548 Ok summary ··· 1443 1552 (* Opam metadata sync: copy .opam files from monorepo subtrees 
to opam-repo *) 1444 1553 1445 1554 type opam_sync_result = { 1446 - synced : string list; (* packages that were updated *) 1447 - unchanged : string list; (* packages that were already in sync *) 1448 - missing : string list; (* packages where monorepo has no .opam file *) 1449 - orphaned : string list; (* packages in opam-repo but subtree missing from monorepo *) 1555 + synced : string list; (* packages that were updated *) 1556 + unchanged : string list; (* packages that were already in sync *) 1557 + missing : string list; (* packages where monorepo has no .opam file *) 1558 + orphaned : string list; 1559 + (* packages in opam-repo but subtree missing from monorepo *) 1450 1560 } 1451 1561 1452 1562 let pp_opam_sync_result ppf r = ··· 1455 1565 (List.length r.orphaned) 1456 1566 1457 1567 (* Read file contents safely, returning None if file doesn't exist *) 1458 - let read_file_opt path = 1459 - try Some (Eio.Path.load path) 1460 - with Eio.Io _ -> None 1568 + let read_file_opt path = try Some (Eio.Path.load path) with Eio.Io _ -> None 1461 1569 1462 1570 (* Sync a single package's opam file from monorepo to opam-repo *) 1463 1571 let sync_opam_file ~proc ~fs ~config pkg = ··· 1468 1576 let version = Package.version pkg in 1469 1577 1470 1578 (* Source: monorepo/<subtree>/<name>.opam *) 1471 - let src_path = Eio.Path.(fs / Fpath.to_string monorepo / subtree_prefix / (name ^ ".opam")) in 1579 + let src_path = 1580 + Eio.Path.(fs / Fpath.to_string monorepo / subtree_prefix / (name ^ ".opam")) 1581 + in 1472 1582 1473 1583 (* Destination: opam-repo/packages/<name>/<name>.<version>/opam *) 1474 - let pkg_dir = Fpath.(opam_repo / "packages" / name / (name ^ "." ^ version)) in 1584 + let pkg_dir = 1585 + Fpath.(opam_repo / "packages" / name / (name ^ "." 
^ version)) 1586 + in 1475 1587 let dst_path = Eio.Path.(fs / Fpath.to_string pkg_dir / "opam") in 1476 1588 1477 1589 match read_file_opt src_path with ··· 1480 1592 `Missing name 1481 1593 | Some src_content -> 1482 1594 let dst_content = read_file_opt dst_path in 1483 - if Some src_content = dst_content then 1484 - `Unchanged name 1595 + if Some src_content = dst_content then `Unchanged name 1485 1596 else begin 1486 1597 (* Create destination directory if needed *) 1487 1598 let pkg_dir_eio = Eio.Path.(fs / Fpath.to_string pkg_dir) in ··· 1491 1602 Eio.Path.save ~create:(`Or_truncate 0o644) dst_path src_content; 1492 1603 (* Stage the change *) 1493 1604 let opam_repo_eio = Eio.Path.(fs / Fpath.to_string opam_repo) in 1494 - let rel_path = Printf.sprintf "packages/%s/%s.%s/opam" name name version in 1605 + let rel_path = 1606 + Printf.sprintf "packages/%s/%s.%s/opam" name name version 1607 + in 1495 1608 Eio.Switch.run (fun sw -> 1496 1609 let child = 1497 1610 Eio.Process.spawn proc ~sw ~cwd:opam_repo_eio ··· 1515 1628 if pkgs = [] && package <> None then 1516 1629 Error (Package_not_found (Option.get package)) 1517 1630 else begin 1518 - Log.app (fun m -> m "Syncing opam files for %d packages..." (List.length pkgs)); 1631 + Log.app (fun m -> 1632 + m "Syncing opam files for %d packages..." 
(List.length pkgs)); 1519 1633 let synced = ref [] in 1520 1634 let unchanged = ref [] in 1521 1635 let missing = ref [] in 1522 1636 let orphaned = ref [] in 1523 1637 1524 1638 (* Check each package *) 1525 - List.iter (fun pkg -> 1639 + List.iter 1640 + (fun pkg -> 1526 1641 (* Check if the subtree exists in monorepo *) 1527 1642 let monorepo = Config.Paths.monorepo config in 1528 1643 let subtree_prefix = Package.subtree_prefix pkg in 1529 - let subtree_exists = Git.Subtree.exists ~fs ~repo:monorepo ~prefix:subtree_prefix in 1644 + let subtree_exists = 1645 + Git.Subtree.exists ~fs ~repo:monorepo ~prefix:subtree_prefix 1646 + in 1530 1647 1531 1648 if not subtree_exists then 1532 1649 (* Subtree doesn't exist - package is orphaned in opam-repo *) ··· 1538 1655 | `Missing name -> missing := name :: !missing) 1539 1656 pkgs; 1540 1657 1541 - let result = { 1542 - synced = List.rev !synced; 1543 - unchanged = List.rev !unchanged; 1544 - missing = List.rev !missing; 1545 - orphaned = List.rev !orphaned; 1546 - } in 1658 + let result = 1659 + { 1660 + synced = List.rev !synced; 1661 + unchanged = List.rev !unchanged; 1662 + missing = List.rev !missing; 1663 + orphaned = List.rev !orphaned; 1664 + } 1665 + in 1547 1666 1548 1667 (* Commit if there were changes *) 1549 1668 if result.synced <> [] then begin 1550 1669 let opam_repo = Config.Paths.opam_repo config in 1551 1670 let opam_repo_eio = Eio.Path.(fs / Fpath.to_string opam_repo) in 1552 - let msg = Printf.sprintf "Sync opam files from monorepo (%d packages)" 1553 - (List.length result.synced) in 1671 + let msg = 1672 + Printf.sprintf "Sync opam files from monorepo (%d packages)" 1673 + (List.length result.synced) 1674 + in 1554 1675 Eio.Switch.run (fun sw -> 1555 1676 let child = 1556 1677 Eio.Process.spawn proc ~sw ~cwd:opam_repo_eio ··· 1562 1683 1563 1684 (* Report orphaned packages *) 1564 1685 if result.orphaned <> [] then begin 1565 - Log.warn (fun m -> m "Found %d orphaned packages in opam-repo (subtree 
missing from monorepo):" 1566 - (List.length result.orphaned)); 1567 - List.iter (fun name -> 1568 - Log.warn (fun m -> m " %s" name)) 1686 + Log.warn (fun m -> 1687 + m 1688 + "Found %d orphaned packages in opam-repo (subtree missing from \ 1689 + monorepo):" 1690 + (List.length result.orphaned)); 1691 + List.iter 1692 + (fun name -> Log.warn (fun m -> m " %s" name)) 1569 1693 result.orphaned; 1570 - Log.warn (fun m -> m "To remove, delete from opam-repo/packages/ and commit.") 1694 + Log.warn (fun m -> 1695 + m "To remove, delete from opam-repo/packages/ and commit.") 1571 1696 end; 1572 1697 1573 1698 Log.app (fun m -> m "%a" pp_opam_sync_result result); ··· 1604 1729 1605 1730 (* Changes command - generate weekly changelogs using Claude *) 1606 1731 1607 - let changes ~proc ~fs ~config ~clock ?package ?(weeks = 1) ?(history = 12) ?(dry_run = false) () = 1732 + let changes ~proc ~fs ~config ~clock ?package ?(weeks = 1) ?(history = 12) 1733 + ?(dry_run = false) () = 1608 1734 let fs_t = fs_typed fs in 1609 1735 let monorepo = Config.Paths.monorepo config in 1610 1736 1611 1737 (* Get current time and calculate week boundaries *) 1612 1738 let now = Eio.Time.now clock in 1613 - let now_ptime = match Ptime.of_float_s now with 1614 - | Some t -> t 1615 - | None -> Ptime.v (0, 0L) (* fallback to epoch *) 1739 + let now_ptime = 1740 + match Ptime.of_float_s now with Some t -> t | None -> Ptime.v (0, 0L) 1741 + (* fallback to epoch *) 1616 1742 in 1617 1743 1618 1744 match discover_packages ~fs:(fs_t :> _ Eio.Path.t) ~config () with 1619 1745 | Error e -> Error e 1620 1746 | Ok all_pkgs -> 1621 1747 let repos = unique_repos all_pkgs in 1622 - let repos = match package with 1748 + let repos = 1749 + match package with 1623 1750 | None -> repos 1624 1751 | Some name -> List.filter (fun p -> Package.repo_name p = name) repos 1625 1752 in 1626 1753 if repos = [] && package <> None then 1627 1754 Error (Package_not_found (Option.get package)) 1628 1755 else begin 1629 - 
Log.info (fun m -> m "Processing changelogs for %d repositories" (List.length repos)); 1756 + Log.info (fun m -> 1757 + m "Processing changelogs for %d repositories" (List.length repos)); 1630 1758 1631 1759 (* Process each repository *) 1632 1760 let all_changes_files = ref [] in 1633 1761 let rec process_repos = function 1634 1762 | [] -> Ok () 1635 - | pkg :: rest -> 1763 + | pkg :: rest -> ( 1636 1764 let repo_name = Package.repo_name pkg in 1637 1765 1638 1766 Log.info (fun m -> m "Processing %s" repo_name); ··· 1640 1768 (* Load existing changes from .changes/<repo>.json *) 1641 1769 match Changes.load ~fs:fs_t ~monorepo repo_name with 1642 1770 | Error e -> Error (Claude_error e) 1643 - | Ok changes_file -> 1771 + | Ok changes_file -> ( 1644 1772 (* Process each week *) 1645 1773 let rec process_weeks week_offset updated_cf = 1646 1774 if week_offset >= weeks then Ok updated_cf 1647 1775 else begin 1648 1776 (* Calculate week boundaries *) 1649 - let offset_seconds = float_of_int (week_offset * 7 * 24 * 60 * 60) in 1650 - let week_time = match Ptime.of_float_s (now -. offset_seconds) with 1777 + let offset_seconds = 1778 + float_of_int (week_offset * 7 * 24 * 60 * 60) 1779 + in 1780 + let week_time = 1781 + match Ptime.of_float_s (now -. 
offset_seconds) with 1651 1782 | Some t -> t 1652 1783 | None -> now_ptime 1653 1784 in 1654 - let week_start, week_end = Changes.week_of_ptime week_time in 1785 + let week_start, week_end = 1786 + Changes.week_of_ptime week_time 1787 + in 1655 1788 1656 1789 (* Skip if week already has an entry *) 1657 1790 if Changes.has_week updated_cf ~week_start then begin 1658 - Log.info (fun m -> m " Week %s already has entry, skipping" week_start); 1791 + Log.info (fun m -> 1792 + m " Week %s already has entry, skipping" week_start); 1659 1793 process_weeks (week_offset + 1) updated_cf 1660 1794 end 1661 1795 else begin 1662 1796 (* Get commits for this week *) 1663 1797 let since = week_start ^ " 00:00:00" in 1664 1798 let until = week_end ^ " 23:59:59" in 1665 - match Git.log ~proc ~fs:fs_t ~since ~until ~path:repo_name monorepo with 1799 + match 1800 + Git.log ~proc ~fs:fs_t ~since ~until ~path:repo_name 1801 + monorepo 1802 + with 1666 1803 | Error e -> Error (Git_error e) 1667 1804 | Ok commits -> 1668 1805 if commits = [] then begin 1669 - Log.info (fun m -> m " No commits for week %s" week_start); 1806 + Log.info (fun m -> 1807 + m " No commits for week %s" week_start); 1670 1808 process_weeks (week_offset + 1) updated_cf 1671 1809 end 1672 1810 else begin 1673 - Log.info (fun m -> m " Found %d commits for week %s" (List.length commits) week_start); 1811 + Log.info (fun m -> 1812 + m " Found %d commits for week %s" 1813 + (List.length commits) week_start); 1674 1814 1675 1815 if dry_run then begin 1676 - Log.app (fun m -> m " [DRY RUN] Would analyze %d commits for %s week %s" 1677 - (List.length commits) repo_name week_start); 1816 + Log.app (fun m -> 1817 + m 1818 + " [DRY RUN] Would analyze %d commits \ 1819 + for %s week %s" 1820 + (List.length commits) repo_name week_start); 1678 1821 process_weeks (week_offset + 1) updated_cf 1679 1822 end 1680 1823 else begin 1681 1824 (* Analyze commits with Claude *) 1682 1825 Eio.Switch.run @@ fun sw -> 1683 - match 
Changes.analyze_commits ~sw ~process_mgr:proc ~clock 1684 - ~repository:repo_name ~week_start ~week_end commits with 1826 + match 1827 + Changes.analyze_commits ~sw ~process_mgr:proc 1828 + ~clock ~repository:repo_name ~week_start 1829 + ~week_end commits 1830 + with 1685 1831 | Error e -> Error (Claude_error e) 1686 1832 | Ok None -> 1687 - Log.info (fun m -> m " No user-facing changes for week %s" week_start); 1833 + Log.info (fun m -> 1834 + m " No user-facing changes for week %s" 1835 + week_start); 1688 1836 process_weeks (week_offset + 1) updated_cf 1689 1837 | Ok (Some response) -> 1690 - Log.app (fun m -> m " Generated changelog for %s week %s" repo_name week_start); 1838 + Log.app (fun m -> 1839 + m " Generated changelog for %s week %s" 1840 + repo_name week_start); 1691 1841 (* Create new entry *) 1692 - let first_hash = (List.hd commits).Git.hash in 1693 - let last_hash = (List.hd (List.rev commits)).Git.hash in 1694 - let entry : Changes.weekly_entry = { 1695 - week_start; 1696 - week_end; 1697 - summary = response.Changes.summary; 1698 - changes = response.Changes.changes; 1699 - commit_range = { 1700 - from_hash = String.sub first_hash 0 (min 7 (String.length first_hash)); 1701 - to_hash = String.sub last_hash 0 (min 7 (String.length last_hash)); 1702 - count = List.length commits; 1703 - }; 1704 - } in 1842 + let first_hash = 1843 + (List.hd commits).Git.hash 1844 + in 1845 + let last_hash = 1846 + (List.hd (List.rev commits)).Git.hash 1847 + in 1848 + let entry : Changes.weekly_entry = 1849 + { 1850 + week_start; 1851 + week_end; 1852 + summary = response.Changes.summary; 1853 + changes = response.Changes.changes; 1854 + commit_range = 1855 + { 1856 + from_hash = 1857 + String.sub first_hash 0 1858 + (min 7 1859 + (String.length first_hash)); 1860 + to_hash = 1861 + String.sub last_hash 0 1862 + (min 7 (String.length last_hash)); 1863 + count = List.length commits; 1864 + }; 1865 + } 1866 + in 1705 1867 (* Add entry (sorted by date descending) *) 
1706 1868 let new_entries = 1707 1869 entry :: updated_cf.Changes.entries 1708 1870 |> List.sort (fun e1 e2 -> 1709 - String.compare e2.Changes.week_start e1.Changes.week_start) 1871 + String.compare e2.Changes.week_start 1872 + e1.Changes.week_start) 1710 1873 in 1711 1874 process_weeks (week_offset + 1) 1712 1875 { updated_cf with entries = new_entries } ··· 1717 1880 in 1718 1881 match process_weeks 0 changes_file with 1719 1882 | Error e -> Error e 1720 - | Ok updated_cf -> 1883 + | Ok updated_cf -> ( 1721 1884 (* Save if changed and not dry run *) 1722 1885 let save_result = 1723 - if not dry_run && updated_cf.entries <> changes_file.entries then 1886 + if 1887 + (not dry_run) 1888 + && updated_cf.entries <> changes_file.entries 1889 + then ( 1724 1890 match Changes.save ~fs:fs_t ~monorepo updated_cf with 1725 1891 | Error e -> Error (Claude_error e) 1726 1892 | Ok () -> 1727 - Log.app (fun m -> m "Saved .changes/%s.json" repo_name); 1728 - Ok () 1893 + Log.app (fun m -> 1894 + m "Saved .changes/%s.json" repo_name); 1895 + Ok ()) 1729 1896 else Ok () 1730 1897 in 1731 1898 match save_result with 1732 1899 | Error e -> Error e 1733 1900 | Ok () -> 1734 1901 all_changes_files := updated_cf :: !all_changes_files; 1735 - process_repos rest 1902 + process_repos rest))) 1736 1903 in 1737 1904 match process_repos repos with 1738 1905 | Error e -> Error e 1739 1906 | Ok () -> 1740 1907 (* Generate aggregated CHANGES.md *) 1741 - if not dry_run && !all_changes_files <> [] then begin 1908 + if (not dry_run) && !all_changes_files <> [] then begin 1742 1909 let markdown = Changes.aggregate ~history !all_changes_files in 1743 - let changes_md_path = Eio.Path.(fs_t / Fpath.to_string monorepo / "CHANGES.md") in 1744 - Eio.Path.save ~create:(`Or_truncate 0o644) changes_md_path markdown; 1910 + let changes_md_path = 1911 + Eio.Path.(fs_t / Fpath.to_string monorepo / "CHANGES.md") 1912 + in 1913 + Eio.Path.save ~create:(`Or_truncate 0o644) changes_md_path 1914 + markdown; 1745 
1915 Log.app (fun m -> m "Generated CHANGES.md at monorepo root") 1746 1916 end; 1747 1917 Ok () ··· 1749 1919 1750 1920 (* Daily changes command - generate daily changelogs using Claude *) 1751 1921 1752 - let changes_daily ~proc ~fs ~config ~clock ?package ?(days = 1) ?(history = 30) ?(dry_run = false) ?(aggregate = false) () = 1922 + let changes_daily ~proc ~fs ~config ~clock ?package ?(days = 1) ?(history = 30) 1923 + ?(dry_run = false) ?(aggregate = false) () = 1753 1924 let fs_t = fs_typed fs in 1754 1925 let monorepo = Config.Paths.monorepo config in 1755 1926 1756 1927 (* Get current time *) 1757 1928 let now = Eio.Time.now clock in 1758 - let now_ptime = match Ptime.of_float_s now with 1759 - | Some t -> t 1760 - | None -> Ptime.v (0, 0L) (* fallback to epoch *) 1929 + let now_ptime = 1930 + match Ptime.of_float_s now with Some t -> t | None -> Ptime.v (0, 0L) 1931 + (* fallback to epoch *) 1761 1932 in 1762 1933 1763 1934 match discover_packages ~fs:(fs_t :> _ Eio.Path.t) ~config () with 1764 1935 | Error e -> Error e 1765 1936 | Ok all_pkgs -> 1766 1937 let repos = unique_repos all_pkgs in 1767 - let repos = match package with 1938 + let repos = 1939 + match package with 1768 1940 | None -> repos 1769 1941 | Some name -> List.filter (fun p -> Package.repo_name p = name) repos 1770 1942 in 1771 1943 if repos = [] && package <> None then 1772 1944 Error (Package_not_found (Option.get package)) 1773 1945 else begin 1774 - Log.info (fun m -> m "Processing daily changelogs for %d repositories" (List.length repos)); 1946 + Log.info (fun m -> 1947 + m "Processing daily changelogs for %d repositories" 1948 + (List.length repos)); 1775 1949 1776 1950 (* Process each repository *) 1777 1951 let all_changes_files = ref [] in 1778 1952 let rec process_repos = function 1779 1953 | [] -> Ok () 1780 - | pkg :: rest -> 1954 + | pkg :: rest -> ( 1781 1955 let repo_name = Package.repo_name pkg in 1782 1956 1783 1957 Log.info (fun m -> m "Processing %s" repo_name); ··· 
1787 1961 if day_offset >= days then Ok () 1788 1962 else begin 1789 1963 (* Calculate day boundaries *) 1790 - let offset_seconds = float_of_int (day_offset * 24 * 60 * 60) in 1791 - let day_time = match Ptime.of_float_s (now -. offset_seconds) with 1964 + let offset_seconds = 1965 + float_of_int (day_offset * 24 * 60 * 60) 1966 + in 1967 + let day_time = 1968 + match Ptime.of_float_s (now -. offset_seconds) with 1792 1969 | Some t -> t 1793 1970 | None -> now_ptime 1794 1971 in ··· 1799 1976 (* For today, skip only if file has entries (may need to catch new commits) *) 1800 1977 let should_skip = 1801 1978 if is_today then 1802 - Changes.daily_exists ~fs:fs_t ~monorepo ~date repo_name && 1803 - (match Changes.load_daily ~fs:fs_t ~monorepo ~date repo_name with 1804 - | Ok cf -> Changes.has_day cf ~date 1805 - | Error _ -> false) 1806 - else 1807 1979 Changes.daily_exists ~fs:fs_t ~monorepo ~date repo_name 1980 + && 1981 + match 1982 + Changes.load_daily ~fs:fs_t ~monorepo ~date repo_name 1983 + with 1984 + | Ok cf -> Changes.has_day cf ~date 1985 + | Error _ -> false 1986 + else Changes.daily_exists ~fs:fs_t ~monorepo ~date repo_name 1808 1987 in 1809 1988 if should_skip then begin 1810 - Log.info (fun m -> m " Day %s already processed, skipping" date); 1811 - (match Changes.load_daily ~fs:fs_t ~monorepo ~date repo_name with 1812 - | Ok cf -> all_changes_files := cf :: !all_changes_files 1813 - | Error _ -> ()); 1989 + Log.info (fun m -> 1990 + m " Day %s already processed, skipping" date); 1991 + (match 1992 + Changes.load_daily ~fs:fs_t ~monorepo ~date repo_name 1993 + with 1994 + | Ok cf -> all_changes_files := cf :: !all_changes_files 1995 + | Error _ -> ()); 1814 1996 process_days (day_offset + 1) 1815 1997 end 1816 1998 else 1817 1999 (* Load existing daily changes from .changes/<repo>-<date>.json *) 1818 - match Changes.load_daily ~fs:fs_t ~monorepo ~date repo_name with 2000 + match 2001 + Changes.load_daily ~fs:fs_t ~monorepo ~date repo_name 2002 + with 
1819 2003 | Error e -> Error (Claude_error e) 1820 - | Ok changes_file -> 2004 + | Ok changes_file -> ( 1821 2005 (* Get commits for this day *) 1822 2006 let since = date ^ " 00:00:00" in 1823 2007 let until = date ^ " 23:59:59" in 1824 - match Git.log ~proc ~fs:fs_t ~since ~until ~path:repo_name monorepo with 2008 + match 2009 + Git.log ~proc ~fs:fs_t ~since ~until ~path:repo_name 2010 + monorepo 2011 + with 1825 2012 | Error e -> Error (Git_error e) 1826 2013 | Ok commits -> 1827 2014 if commits = [] then begin 1828 - Log.info (fun m -> m " No commits for day %s" date); 2015 + Log.info (fun m -> 2016 + m " No commits for day %s" date); 1829 2017 process_days (day_offset + 1) 1830 2018 end 1831 2019 else begin 1832 - Log.info (fun m -> m " Found %d commits for day %s" (List.length commits) date); 2020 + Log.info (fun m -> 2021 + m " Found %d commits for day %s" 2022 + (List.length commits) date); 1833 2023 1834 2024 if dry_run then begin 1835 - Log.app (fun m -> m " [DRY RUN] Would analyze %d commits for %s on %s" 1836 - (List.length commits) repo_name date); 2025 + Log.app (fun m -> 2026 + m 2027 + " [DRY RUN] Would analyze %d commits \ 2028 + for %s on %s" 2029 + (List.length commits) repo_name date); 1837 2030 process_days (day_offset + 1) 1838 2031 end 1839 2032 else begin 1840 2033 (* Analyze commits with Claude *) 1841 2034 Eio.Switch.run @@ fun sw -> 1842 - match Changes.analyze_commits_daily ~sw ~process_mgr:proc ~clock 1843 - ~repository:repo_name ~date commits with 2035 + match 2036 + Changes.analyze_commits_daily ~sw 2037 + ~process_mgr:proc ~clock 2038 + ~repository:repo_name ~date commits 2039 + with 1844 2040 | Error e -> Error (Claude_error e) 1845 2041 | Ok None -> 1846 - Log.info (fun m -> m " No user-facing changes for day %s" date); 2042 + Log.info (fun m -> 2043 + m " No user-facing changes for day %s" 2044 + date); 1847 2045 process_days (day_offset + 1) 1848 - | Ok (Some response) -> 1849 - Log.app (fun m -> m " Generated changelog for %s on 
%s" repo_name date); 2046 + | Ok (Some response) -> ( 2047 + Log.app (fun m -> 2048 + m " Generated changelog for %s on %s" 2049 + repo_name date); 1850 2050 (* Extract unique contributors from commits *) 1851 2051 let contributors = 1852 2052 commits 1853 - |> List.map (fun (c : Git.log_entry) -> c.author) 2053 + |> List.map (fun (c : Git.log_entry) -> 2054 + c.author) 1854 2055 |> List.sort_uniq String.compare 1855 2056 in 1856 2057 (* Get repo URL from package dev_repo *) ··· 1858 2059 let uri = Package.dev_repo pkg in 1859 2060 let url = Uri.to_string uri in 1860 2061 (* Strip git+ prefix if present for display *) 1861 - if String.starts_with ~prefix:"git+" url then 1862 - Some (String.sub url 4 (String.length url - 4)) 1863 - else 1864 - Some url 2062 + if String.starts_with ~prefix:"git+" url 2063 + then 2064 + Some 2065 + (String.sub url 4 2066 + (String.length url - 4)) 2067 + else Some url 1865 2068 in 1866 2069 (* Create new entry with hour and timestamp *) 1867 - let first_hash = (List.hd commits).Git.hash in 1868 - let last_hash = (List.hd (List.rev commits)).Git.hash in 1869 - let (_, ((hour, _, _), _)) = Ptime.to_date_time now_ptime in 1870 - let entry : Changes.daily_entry = { 1871 - date; 1872 - hour; 1873 - timestamp = now_ptime; 1874 - summary = response.Changes.summary; 1875 - changes = response.Changes.changes; 1876 - commit_range = { 1877 - from_hash = String.sub first_hash 0 (min 7 (String.length first_hash)); 1878 - to_hash = String.sub last_hash 0 (min 7 (String.length last_hash)); 1879 - count = List.length commits; 1880 - }; 1881 - contributors; 1882 - repo_url; 1883 - } in 2070 + let first_hash = 2071 + (List.hd commits).Git.hash 2072 + in 2073 + let last_hash = 2074 + (List.hd (List.rev commits)).Git.hash 2075 + in 2076 + let _, ((hour, _, _), _) = 2077 + Ptime.to_date_time now_ptime 2078 + in 2079 + let entry : Changes.daily_entry = 2080 + { 2081 + date; 2082 + hour; 2083 + timestamp = now_ptime; 2084 + summary = 
response.Changes.summary; 2085 + changes = response.Changes.changes; 2086 + commit_range = 2087 + { 2088 + from_hash = 2089 + String.sub first_hash 0 2090 + (min 7 2091 + (String.length first_hash)); 2092 + to_hash = 2093 + String.sub last_hash 0 2094 + (min 7 (String.length last_hash)); 2095 + count = List.length commits; 2096 + }; 2097 + contributors; 2098 + repo_url; 2099 + } 2100 + in 1884 2101 (* Add entry (sorted by timestamp descending) *) 1885 2102 let new_entries = 1886 2103 entry :: changes_file.Changes.entries 1887 2104 |> List.sort (fun e1 e2 -> 1888 - Ptime.compare e2.Changes.timestamp e1.Changes.timestamp) 2105 + Ptime.compare e2.Changes.timestamp 2106 + e1.Changes.timestamp) 1889 2107 in 1890 - let updated_cf = { changes_file with Changes.entries = new_entries } in 2108 + let updated_cf = 2109 + { 2110 + changes_file with 2111 + Changes.entries = new_entries; 2112 + } 2113 + in 1891 2114 (* Save the per-day file *) 1892 - match Changes.save_daily ~fs:fs_t ~monorepo ~date updated_cf with 2115 + match 2116 + Changes.save_daily ~fs:fs_t ~monorepo 2117 + ~date updated_cf 2118 + with 1893 2119 | Error e -> Error (Claude_error e) 1894 2120 | Ok () -> 1895 - Log.app (fun m -> m "Saved .changes/%s-%s.json" repo_name date); 1896 - all_changes_files := updated_cf :: !all_changes_files; 1897 - process_days (day_offset + 1) 2121 + Log.app (fun m -> 2122 + m "Saved .changes/%s-%s.json" 2123 + repo_name date); 2124 + all_changes_files := 2125 + updated_cf :: !all_changes_files; 2126 + process_days (day_offset + 1)) 1898 2127 end 1899 - end 2128 + end) 1900 2129 end 1901 2130 in 1902 2131 match process_days 0 with 1903 2132 | Error e -> Error e 1904 - | Ok () -> process_repos rest 2133 + | Ok () -> process_repos rest) 1905 2134 in 1906 2135 match process_repos repos with 1907 2136 | Error e -> Error e 1908 2137 | Ok () -> 1909 2138 (* Generate aggregated DAILY-CHANGES.md *) 1910 - if not dry_run && !all_changes_files <> [] then begin 1911 - let raw_markdown = 
Changes.aggregate_daily ~history !all_changes_files in 2139 + if (not dry_run) && !all_changes_files <> [] then begin 2140 + let raw_markdown = 2141 + Changes.aggregate_daily ~history !all_changes_files 2142 + in 1912 2143 (* Refine the markdown through Claude for better narrative *) 1913 2144 Log.info (fun m -> m "Refining daily changelog with Claude..."); 1914 - let markdown = Eio.Switch.run @@ fun sw -> 1915 - match Changes.refine_daily_changelog ~sw ~process_mgr:proc ~clock raw_markdown with 2145 + let markdown = 2146 + Eio.Switch.run @@ fun sw -> 2147 + match 2148 + Changes.refine_daily_changelog ~sw ~process_mgr:proc ~clock 2149 + raw_markdown 2150 + with 1916 2151 | Ok refined -> 1917 - Log.app (fun m -> m "Refined daily changelog for readability"); 2152 + Log.app (fun m -> 2153 + m "Refined daily changelog for readability"); 1918 2154 refined 1919 2155 | Error e -> 1920 - Log.warn (fun m -> m "Failed to refine changelog: %s (using raw version)" e); 2156 + Log.warn (fun m -> 2157 + m "Failed to refine changelog: %s (using raw version)" e); 1921 2158 raw_markdown 1922 2159 in 1923 - let changes_md_path = Eio.Path.(fs_t / Fpath.to_string monorepo / "DAILY-CHANGES.md") in 1924 - Eio.Path.save ~create:(`Or_truncate 0o644) changes_md_path markdown; 2160 + let changes_md_path = 2161 + Eio.Path.(fs_t / Fpath.to_string monorepo / "DAILY-CHANGES.md") 2162 + in 2163 + Eio.Path.save ~create:(`Or_truncate 0o644) changes_md_path 2164 + markdown; 1925 2165 Log.app (fun m -> m "Generated DAILY-CHANGES.md at monorepo root") 1926 2166 end; 1927 2167 (* Generate aggregated JSON file if requested *) 1928 - if not dry_run && aggregate then begin 2168 + if (not dry_run) && aggregate then begin 1929 2169 let today = Changes.date_of_ptime now_ptime in 1930 2170 let git_head = 1931 2171 match Git.rev_parse ~proc ~fs:fs_t ~rev:"HEAD" monorepo with 1932 2172 | Ok hash -> String.sub hash 0 (min 7 (String.length hash)) 1933 2173 | Error _ -> "unknown" 1934 2174 in 1935 - match 
Changes.generate_aggregated ~fs:fs_t ~monorepo ~date:today ~git_head ~now:now_ptime with 1936 - | Ok () -> Log.app (fun m -> m "Generated aggregated file .changes/%s.json" 1937 - (String.concat "" (String.split_on_char '-' today))) 1938 - | Error e -> Log.warn (fun m -> m "Failed to generate aggregated file: %s" e) 2175 + match 2176 + Changes.generate_aggregated ~fs:fs_t ~monorepo ~date:today 2177 + ~git_head ~now:now_ptime 2178 + with 2179 + | Ok () -> 2180 + Log.app (fun m -> 2181 + m "Generated aggregated file .changes/%s.json" 2182 + (String.concat "" (String.split_on_char '-' today))) 2183 + | Error e -> 2184 + Log.warn (fun m -> 2185 + m "Failed to generate aggregated file: %s" e) 1939 2186 end; 1940 2187 Ok () 1941 2188 end
+39 -38
lib/monopam.mli
··· 51 51 (** [pp_error] formats errors. *) 52 52 53 53 val pp_error_with_hint : error Fmt.t 54 - (** [pp_error_with_hint] formats errors with a helpful hint for resolving them. *) 54 + (** [pp_error_with_hint] formats errors with a helpful hint for resolving them. 55 + *) 55 56 56 57 val error_hint : error -> string option 57 58 (** [error_hint e] returns a hint string for the given error, if available. *) ··· 81 82 ?opam_repo_url:string -> 82 83 unit -> 83 84 (unit, error) result 84 - (** [pull ~proc ~fs ~config ?package ?opam_repo_url ()] pulls updates from remotes. 85 + (** [pull ~proc ~fs ~config ?package ?opam_repo_url ()] pulls updates from 86 + remotes. 85 87 86 88 For each package (or the specified package): 1. Clones or fetches the 87 89 individual checkout 2. Adds or pulls the subtree in the monorepo ··· 95 97 @param fs Eio filesystem 96 98 @param config Monopam configuration 97 99 @param package Optional specific package to pull 98 - @param opam_repo_url Optional URL to clone opam-repo from if it doesn't exist *) 100 + @param opam_repo_url 101 + Optional URL to clone opam-repo from if it doesn't exist *) 99 102 100 103 (** {2 Push} *) 101 104 ··· 127 130 128 131 (** {2 Sync} *) 129 132 130 - (** Phase where a sync failure occurred. *) 131 133 type sync_phase = [ `Push_checkout | `Fetch | `Merge | `Subtree | `Push_remote ] 134 + (** Phase where a sync failure occurred. *) 132 135 133 - (** A failure during sync for a specific repository. *) 134 136 type sync_failure = { 135 137 repo_name : string; 136 138 phase : sync_phase; 137 139 error : Git.error; 138 140 } 141 + (** A failure during sync for a specific repository. *) 139 142 140 - (** Summary of a sync operation. *) 141 143 type sync_summary = { 142 144 repos_synced : int; 143 145 repos_unchanged : int; ··· 145 147 commits_pushed : int; 146 148 errors : sync_failure list; 147 149 } 150 + (** Summary of a sync operation. 
*) 148 151 149 152 val pp_sync_phase : sync_phase Fmt.t 150 153 (** [pp_sync_phase] formats a sync phase. *) ··· 168 171 (** [sync ~proc ~fs ~config ?package ?remote ?skip_push ?skip_pull ()] 169 172 synchronizes the monorepo with upstream repositories. 170 173 171 - This is the primary command for all sync operations. It performs both 172 - push and pull operations in the correct order: 173 - 1. Validate: check for dirty state (abort if dirty) 174 - 2. Push phase: export monorepo changes to checkouts (parallel) 175 - 3. Fetch phase: clone/fetch from remotes (parallel) 176 - 4. Merge phase: fast-forward merge checkouts (sequential) 177 - 5. Subtree phase: pull subtrees into monorepo (sequential) 178 - 6. Finalize: write README.md and dune-project (sequential) 179 - 7. Remote phase: push to upstream remotes if [~remote:true] (parallel) 174 + This is the primary command for all sync operations. It performs both push 175 + and pull operations in the correct order: 1. Validate: check for dirty state 176 + (abort if dirty) 2. Push phase: export monorepo changes to checkouts 177 + (parallel) 3. Fetch phase: clone/fetch from remotes (parallel) 4. Merge 178 + phase: fast-forward merge checkouts (sequential) 5. Subtree phase: pull 179 + subtrees into monorepo (sequential) 6. Finalize: write README.md and 180 + dune-project (sequential) 7. Remote phase: push to upstream remotes if 181 + [~remote:true] (parallel) 180 182 181 183 The fetch and remote push phases run concurrently for improved performance. 182 184 ··· 190 192 191 193 (** {2 Opam Metadata Sync} *) 192 194 193 - (** Result of syncing opam files from monorepo to opam-repo. 
*) 194 195 type opam_sync_result = { 195 196 synced : string list; (** Packages that were updated *) 196 197 unchanged : string list; (** Packages that were already in sync *) 197 198 missing : string list; (** Packages where monorepo has no .opam file *) 198 - orphaned : string list; (** Packages in opam-repo but subtree missing from monorepo *) 199 + orphaned : string list; 200 + (** Packages in opam-repo but subtree missing from monorepo *) 199 201 } 202 + (** Result of syncing opam files from monorepo to opam-repo. *) 200 203 201 204 val pp_opam_sync_result : opam_sync_result Fmt.t 202 205 (** [pp_opam_sync_result] formats an opam sync result. *) ··· 211 214 (** [sync_opam_files ~proc ~fs ~config ?package ()] synchronizes .opam files 212 215 from monorepo subtrees to the opam-repo overlay. 213 216 214 - For each package (or the specified package): 215 - 1. Checks if the subtree exists in the monorepo 216 - 2. If subtree missing, reports as orphaned (needs manual removal) 217 - 3. Reads the .opam file from the monorepo subtree 218 - 4. Compares with the opam-repo version 219 - 5. If different, copies monorepo → opam-repo (local always wins) 220 - 6. Stages and commits changes in opam-repo 217 + For each package (or the specified package): 1. Checks if the subtree exists 218 + in the monorepo 2. If subtree missing, reports as orphaned (needs manual 219 + removal) 3. Reads the .opam file from the monorepo subtree 4. Compares with 220 + the opam-repo version 5. If different, copies monorepo → opam-repo (local 221 + always wins) 6. Stages and commits changes in opam-repo 221 222 222 223 Orphaned packages (in opam-repo but subtree missing from monorepo) are 223 224 reported with a warning suggesting manual removal. ··· 317 318 (** [changes ~proc ~fs ~config ~clock ?package ?weeks ?history ?dry_run ()] 318 319 generates weekly changelog entries using Claude AI. 319 320 320 - For each repository (or the specified package's repository): 321 - 1. 
Loads or creates .changes/<repo>.json 322 - 2. For each week that doesn't have an entry, retrieves git commits 323 - 3. Sends commits to Claude for analysis 324 - 4. Saves changelog entries back to .changes/<repo>.json 321 + For each repository (or the specified package's repository): 1. Loads or 322 + creates .changes/<repo>.json 2. For each week that doesn't have an entry, 323 + retrieves git commits 3. Sends commits to Claude for analysis 4. Saves 324 + changelog entries back to .changes/<repo>.json 325 325 326 326 Also generates an aggregated CHANGES.md at the monorepo root. 327 327 ··· 346 346 ?aggregate:bool -> 347 347 unit -> 348 348 (unit, error) result 349 - (** [changes_daily ~proc ~fs ~config ~clock ?package ?days ?history ?dry_run ?aggregate ()] 350 - generates daily changelog entries using Claude AI. 349 + (** [changes_daily ~proc ~fs ~config ~clock ?package ?days ?history ?dry_run 350 + ?aggregate ()] generates daily changelog entries using Claude AI. 351 351 352 - For each repository (or the specified package's repository): 353 - 1. Loads or creates .changes/<repo>-daily.json 354 - 2. For each day that doesn't have an entry, retrieves git commits 355 - 3. Sends commits to Claude for analysis 356 - 4. Saves changelog entries back to .changes/<repo>-daily.json 352 + For each repository (or the specified package's repository): 1. Loads or 353 + creates .changes/<repo>-daily.json 2. For each day that doesn't have an 354 + entry, retrieves git commits 3. Sends commits to Claude for analysis 4. 355 + Saves changelog entries back to .changes/<repo>-daily.json 357 356 358 357 Also generates an aggregated DAILY-CHANGES.md at the monorepo root. 359 358 Repositories with no user-facing changes will have blank entries. 
··· 367 366 @param clock Eio clock for time operations 368 367 @param package Optional specific repository to process 369 368 @param days Number of past days to analyze (default: 1) 370 - @param history Number of recent days to include in DAILY-CHANGES.md (default: 30) 369 + @param history 370 + Number of recent days to include in DAILY-CHANGES.md (default: 30) 371 371 @param dry_run If true, preview changes without writing files 372 - @param aggregate If true, also generate .changes/YYYYMMDD.json aggregated file *) 372 + @param aggregate 373 + If true, also generate .changes/YYYYMMDD.json aggregated file *)
+9 -10
lib/opam_repo.ml
··· 59 59 | OP.Option (inner, _) -> extract_dep_name inner 60 60 | _ -> None 61 61 62 - (** Extract all dependency package names from a depends value. 63 - The depends field is a list of package formulas. *) 62 + (** Extract all dependency package names from a depends value. The depends field 63 + is a list of package formulas. *) 64 64 let extract_depends_list (v : OP.value) : string list = 65 65 match v.pelem with 66 - | OP.List { pelem = items; _ } -> 67 - List.filter_map extract_dep_name items 68 - | _ -> ( 69 - match extract_dep_name v with Some s -> [ s ] | None -> []) 66 + | OP.List { pelem = items; _ } -> List.filter_map extract_dep_name items 67 + | _ -> ( match extract_dep_name v with Some s -> [ s ] | None -> []) 70 68 71 69 let find_depends (items : OP.opamfile_item list) : string list = 72 70 List.find_map ··· 163 161 let _, errors = scan_all ~fs repo_path in 164 162 errors 165 163 166 - (** Scan a directory for .opam files and extract all dependencies. 167 - This is used to find dependencies from monorepo subtree directories, 168 - where multiple .opam files may exist that aren't in the opam overlay. *) 164 + (** Scan a directory for .opam files and extract all dependencies. This is used 165 + to find dependencies from monorepo subtree directories, where multiple .opam 166 + files may exist that aren't in the opam overlay. *) 169 167 let scan_opam_files_for_deps ~fs dir_path = 170 168 let eio_path = Eio.Path.(fs / Fpath.to_string dir_path) in 171 169 try ··· 179 177 try 180 178 let content = Eio.Path.load opam_path in 181 179 let opamfile = 182 - OpamParser.FullPos.string content (Fpath.to_string dir_path ^ "/" ^ opam_file) 180 + OpamParser.FullPos.string content 181 + (Fpath.to_string dir_path ^ "/" ^ opam_file) 183 182 in 184 183 find_depends opamfile.file_contents 185 184 with _ -> [])
+4 -3
lib/opam_repo.mli
··· 80 80 (** [scan_opam_files_for_deps ~fs dir_path] scans a directory for .opam files 81 81 and extracts all dependencies from them. 82 82 83 - This is used to find dependencies from monorepo subtree directories, 84 - where multiple .opam files may exist that aren't in the opam overlay. 83 + This is used to find dependencies from monorepo subtree directories, where 84 + multiple .opam files may exist that aren't in the opam overlay. 85 85 86 86 @param fs Eio filesystem capability 87 87 @param dir_path Path to the directory to scan ··· 90 90 (** {1 Low-level Opam File Parsing} *) 91 91 92 92 val find_dev_repo : OpamParserTypes.FullPos.opamfile_item list -> string option 93 - (** [find_dev_repo items] extracts the dev-repo field from parsed opam file items. *) 93 + (** [find_dev_repo items] extracts the dev-repo field from parsed opam file 94 + items. *)
+4 -2
lib/package.mli
··· 20 20 ?synopsis:string -> 21 21 unit -> 22 22 t 23 - (** [create ~name ~version ~dev_repo ?branch ?depends ?synopsis ()] creates a new package. 23 + (** [create ~name ~version ~dev_repo ?branch ?depends ?synopsis ()] creates a 24 + new package. 24 25 25 26 @param name The opam package name 26 27 @param version The package version (e.g., "dev") ··· 44 45 (** [branch t] returns the branch to track, if explicitly set. *) 45 46 46 47 val depends : t -> string list 47 - (** [depends t] returns the list of opam package names this package depends on. *) 48 + (** [depends t] returns the list of opam package names this package depends on. 49 + *) 48 50 49 51 val synopsis : t -> string option 50 52 (** [synopsis t] returns the short description of the package, if any. *)
+85 -49
lib/status.ml
··· 8 8 9 9 (** Sync state between monorepo subtree and local checkout *) 10 10 type subtree_sync = 11 - | In_sync (** Subtree matches checkout HEAD *) 12 - | Subtree_behind of int (** Subtree needs pull from checkout (checkout has new commits) *) 13 - | Subtree_ahead of int (** Subtree has commits not in checkout (need push to checkout) *) 14 - | Trees_differ (** Trees differ but can't determine direction/count *) 15 - | Unknown (** Can't determine (subtree not added or checkout missing) *) 11 + | In_sync (** Subtree matches checkout HEAD *) 12 + | Subtree_behind of int 13 + (** Subtree needs pull from checkout (checkout has new commits) *) 14 + | Subtree_ahead of int 15 + (** Subtree has commits not in checkout (need push to checkout) *) 16 + | Trees_differ (** Trees differ but can't determine direction/count *) 17 + | Unknown (** Can't determine (subtree not added or checkout missing) *) 16 18 17 19 type t = { 18 20 package : Package.t; ··· 58 60 match (checkout, subtree) with 59 61 | (Missing | Not_a_repo | Dirty), _ -> Unknown 60 62 | _, Not_added -> Unknown 61 - | Clean _, Present -> 63 + | Clean _, Present -> ( 62 64 (* Get tree hash of subtree directory in monorepo *) 63 - let subtree_tree = Git.rev_parse ~proc ~fs:fs_t ~rev:("HEAD:" ^ prefix) monorepo in 65 + let subtree_tree = 66 + Git.rev_parse ~proc ~fs:fs_t ~rev:("HEAD:" ^ prefix) monorepo 67 + in 64 68 (* Get tree hash of checkout root *) 65 - let checkout_tree = Git.rev_parse ~proc ~fs:fs_t ~rev:"HEAD^{tree}" checkout_dir in 69 + let checkout_tree = 70 + Git.rev_parse ~proc ~fs:fs_t ~rev:"HEAD^{tree}" checkout_dir 71 + in 66 72 match (subtree_tree, checkout_tree) with 67 73 | Ok st, Ok ct when st = ct -> In_sync 68 - | Ok _, Ok _ -> 74 + | Ok _, Ok _ -> ( 69 75 (* Trees differ - check commit ancestry to determine direction *) 70 76 let subtree_commit = 71 - Git.subtree_last_upstream_commit ~proc ~fs:fs_t ~repo:monorepo ~prefix () 77 + Git.subtree_last_upstream_commit ~proc ~fs:fs_t ~repo:monorepo 78 
+ ~prefix () 72 79 in 73 80 let checkout_head = Git.head_commit ~proc ~fs:fs_t checkout_dir in 74 - (match (subtree_commit, checkout_head) with 81 + match (subtree_commit, checkout_head) with 75 82 | Some subtree_sha, Ok checkout_sha -> 76 - if Git.is_ancestor ~proc ~fs:fs_t ~repo:checkout_dir 77 - ~commit1:subtree_sha ~commit2:checkout_sha () then 83 + if 84 + Git.is_ancestor ~proc ~fs:fs_t ~repo:checkout_dir 85 + ~commit1:subtree_sha ~commit2:checkout_sha () 86 + then 78 87 (* Checkout has commits not in subtree - need subtree pull *) 79 - let count = Git.count_commits_between ~proc ~fs:fs_t ~repo:checkout_dir 80 - ~base:subtree_sha ~head:checkout_sha () in 81 - if count > 0 then Subtree_behind count 82 - else Trees_differ (* Same commit but trees differ - monorepo has changes *) 83 - else if Git.is_ancestor ~proc ~fs:fs_t ~repo:checkout_dir 84 - ~commit1:checkout_sha ~commit2:subtree_sha () then 88 + let count = 89 + Git.count_commits_between ~proc ~fs:fs_t ~repo:checkout_dir 90 + ~base:subtree_sha ~head:checkout_sha () 91 + in 92 + if count > 0 then Subtree_behind count else Trees_differ 93 + (* Same commit but trees differ - monorepo has changes *) 94 + else if 95 + Git.is_ancestor ~proc ~fs:fs_t ~repo:checkout_dir 96 + ~commit1:checkout_sha ~commit2:subtree_sha () 97 + then 85 98 (* Subtree has content not in checkout - need push *) 86 - let count = Git.count_commits_between ~proc ~fs:fs_t ~repo:checkout_dir 87 - ~base:checkout_sha ~head:subtree_sha () in 88 - if count > 0 then Subtree_ahead count 89 - else Trees_differ 90 - else 91 - Trees_differ (* Diverged *) 92 - | _ -> Trees_differ) (* Trees differ but can't determine ancestry *) 93 - | _ -> Unknown 99 + let count = 100 + Git.count_commits_between ~proc ~fs:fs_t ~repo:checkout_dir 101 + ~base:checkout_sha ~head:subtree_sha () 102 + in 103 + if count > 0 then Subtree_ahead count else Trees_differ 104 + else Trees_differ (* Diverged *) 105 + | _ -> Trees_differ 106 + (* Trees differ but can't determine 
ancestry *)) 107 + | _ -> Unknown) 94 108 in 95 109 { package = pkg; checkout; subtree; subtree_sync } 96 110 ··· 113 127 114 128 (** Needs remote action: checkout ahead/behind of upstream *) 115 129 let needs_remote_action t = 116 - match t.checkout with 117 - | Clean ab -> ab.ahead > 0 || ab.behind > 0 118 - | _ -> false 130 + match t.checkout with Clean ab -> ab.ahead > 0 || ab.behind > 0 | _ -> false 119 131 120 132 let is_fully_synced t = 121 133 match (t.checkout, t.subtree, t.subtree_sync) with ··· 128 140 match t.checkout with 129 141 | Missing | Not_a_repo | Dirty -> true 130 142 | Clean ab -> 131 - ab.ahead > 0 || ab.behind > 0 || 132 - t.subtree = Not_added || 133 - needs_local_sync t) 143 + ab.ahead > 0 || ab.behind > 0 || t.subtree = Not_added 144 + || needs_local_sync t) 134 145 statuses 135 146 136 147 let pp_checkout_status ppf = function ··· 155 166 (* Helper to print remote sync info *) 156 167 let pp_remote ab = 157 168 if ab.Git.ahead > 0 && ab.behind > 0 then 158 - Fmt.pf ppf " %a" Fmt.(styled `Yellow (fun ppf (a, b) -> pf ppf "remote:+%d/-%d" a b)) (ab.ahead, ab.behind) 169 + Fmt.pf ppf " %a" 170 + Fmt.(styled `Yellow (fun ppf (a, b) -> pf ppf "remote:+%d/-%d" a b)) 171 + (ab.ahead, ab.behind) 159 172 else if ab.ahead > 0 then 160 - Fmt.pf ppf " %a" Fmt.(styled `Cyan (fun ppf n -> pf ppf "remote:+%d" n)) ab.ahead 173 + Fmt.pf ppf " %a" 174 + Fmt.(styled `Cyan (fun ppf n -> pf ppf "remote:+%d" n)) 175 + ab.ahead 161 176 else if ab.behind > 0 then 162 - Fmt.pf ppf " %a" Fmt.(styled `Red (fun ppf n -> pf ppf "remote:-%d" n)) ab.behind 177 + Fmt.pf ppf " %a" 178 + Fmt.(styled `Red (fun ppf n -> pf ppf "remote:-%d" n)) 179 + ab.behind 163 180 in 164 181 match (t.checkout, t.subtree, t.subtree_sync) with 165 182 (* Local sync issues with count *) 166 183 | Clean ab, Present, Subtree_behind n -> 167 - Fmt.pf ppf "%-22s %a" name Fmt.(styled `Blue (fun ppf n -> pf ppf "local:-%d" n)) n; 184 + Fmt.pf ppf "%-22s %a" name 185 + Fmt.(styled `Blue (fun ppf 
n -> pf ppf "local:-%d" n)) 186 + n; 168 187 pp_remote ab 169 188 | Clean ab, Present, Subtree_ahead n -> 170 - Fmt.pf ppf "%-22s %a" name Fmt.(styled `Blue (fun ppf n -> pf ppf "local:+%d" n)) n; 189 + Fmt.pf ppf "%-22s %a" name 190 + Fmt.(styled `Blue (fun ppf n -> pf ppf "local:+%d" n)) 191 + n; 171 192 pp_remote ab 172 193 (* Trees differ but can't determine count *) 173 194 | Clean ab, Present, Trees_differ -> ··· 175 196 pp_remote ab 176 197 (* Remote sync issues only *) 177 198 | Clean ab, Present, (In_sync | Unknown) when ab.ahead > 0 && ab.behind > 0 -> 178 - Fmt.pf ppf "%-22s %a" name Fmt.(styled `Yellow (fun ppf (a,b) -> pf ppf "remote:+%d/-%d" a b)) (ab.ahead, ab.behind) 199 + Fmt.pf ppf "%-22s %a" name 200 + Fmt.(styled `Yellow (fun ppf (a, b) -> pf ppf "remote:+%d/-%d" a b)) 201 + (ab.ahead, ab.behind) 179 202 | Clean ab, Present, (In_sync | Unknown) when ab.ahead > 0 -> 180 - Fmt.pf ppf "%-22s %a" name Fmt.(styled `Cyan (fun ppf n -> pf ppf "remote:+%d" n)) ab.ahead 203 + Fmt.pf ppf "%-22s %a" name 204 + Fmt.(styled `Cyan (fun ppf n -> pf ppf "remote:+%d" n)) 205 + ab.ahead 181 206 | Clean ab, Present, (In_sync | Unknown) when ab.behind > 0 -> 182 - Fmt.pf ppf "%-22s %a" name Fmt.(styled `Red (fun ppf n -> pf ppf "remote:-%d" n)) ab.behind 207 + Fmt.pf ppf "%-22s %a" name 208 + Fmt.(styled `Red (fun ppf n -> pf ppf "remote:-%d" n)) 209 + ab.behind 183 210 (* Other issues *) 184 211 | Clean _, Not_added, _ -> 185 212 Fmt.pf ppf "%-22s %a" name Fmt.(styled `Magenta string) "(no subtree)" ··· 197 224 let actionable = filter_actionable statuses in 198 225 let synced = List.filter is_fully_synced statuses |> List.length in 199 226 let dirty = List.filter has_local_changes statuses |> List.length in 200 - let local_sync_needed = List.filter needs_local_sync statuses |> List.length in 227 + let local_sync_needed = 228 + List.filter needs_local_sync statuses |> List.length 229 + in 201 230 let remote_needed = List.filter needs_remote_action statuses |> 
List.length in 202 231 let action_count = List.length actionable in 203 232 (* Header line with colors *) 204 233 if dirty > 0 then 205 234 Fmt.pf ppf "%a %d total, %a synced, %a dirty\n" 206 - Fmt.(styled `Bold string) "Packages:" total 207 - Fmt.(styled `Green int) synced 208 - Fmt.(styled `Yellow int) dirty 235 + Fmt.(styled `Bold string) 236 + "Packages:" total 237 + Fmt.(styled `Green int) 238 + synced 239 + Fmt.(styled `Yellow int) 240 + dirty 209 241 else if action_count > 0 then begin 210 242 Fmt.pf ppf "%a %d total, %a synced" 211 - Fmt.(styled `Bold string) "Packages:" total 212 - Fmt.(styled `Green int) synced; 243 + Fmt.(styled `Bold string) 244 + "Packages:" total 245 + Fmt.(styled `Green int) 246 + synced; 213 247 if local_sync_needed > 0 then 214 248 Fmt.pf ppf ", %a local sync" Fmt.(styled `Blue int) local_sync_needed; 215 249 if remote_needed > 0 then ··· 218 252 end 219 253 else 220 254 Fmt.pf ppf "%a %d total, %a\n" 221 - Fmt.(styled `Bold string) "Packages:" total 222 - Fmt.(styled `Green string) "all synced"; 255 + Fmt.(styled `Bold string) 256 + "Packages:" total 257 + Fmt.(styled `Green string) 258 + "all synced"; 223 259 (* Only show actionable items *) 224 260 if actionable <> [] then 225 261 List.iter (fun t -> Fmt.pf ppf " %a\n" pp_compact t) actionable
+2 -3
lib/status.mli
··· 18 18 | Not_added (** Subtree has not been added to monorepo *) 19 19 | Present (** Subtree exists in monorepo *) 20 20 21 - (** Sync state between monorepo subtree and local checkout. 22 - This distinguishes issues fixable with [monopam sync] from those 23 - requiring network access. *) 21 + (** Sync state between monorepo subtree and local checkout. This distinguishes 22 + issues fixable with [monopam sync] from those requiring network access. *) 24 23 type subtree_sync = 25 24 | In_sync (** Subtree matches checkout HEAD *) 26 25 | Subtree_behind of int
+104 -71
lib/verse.ml
··· 16 16 17 17 let error_hint = function 18 18 | Config_error _ -> 19 - Some "Run 'monopam verse init --handle <your-handle>' to create a workspace." 19 + Some 20 + "Run 'monopam verse init --handle <your-handle>' to create a workspace." 20 21 | Git_error (Git.Dirty_worktree _) -> 21 22 Some "Commit or stash your changes first: git status" 22 - | Git_error (Git.Command_failed (cmd, _)) when String.starts_with ~prefix:"git clone" cmd -> 23 + | Git_error (Git.Command_failed (cmd, _)) 24 + when String.starts_with ~prefix:"git clone" cmd -> 23 25 Some "Check the URL is correct and you have network access." 24 - | Git_error (Git.Command_failed (cmd, _)) when String.starts_with ~prefix:"git pull" cmd -> 26 + | Git_error (Git.Command_failed (cmd, _)) 27 + when String.starts_with ~prefix:"git pull" cmd -> 25 28 Some "Check your network connection. Try: git fetch origin" 26 29 | Git_error _ -> None 27 30 | Registry_error _ -> 28 31 Some "The registry may be temporarily unavailable. Try again later." 29 32 | Member_not_found h -> 30 - Some (Fmt.str "Check available members: monopam verse members (looking for '%s')" h) 33 + Some 34 + (Fmt.str 35 + "Check available members: monopam verse members (looking for '%s')" h) 31 36 | Workspace_exists _ -> 32 37 Some "Use a different directory, or remove the existing workspace." 33 38 | Not_a_workspace _ -> 34 - Some "Run 'monopam verse init --handle <your-handle>' to create a workspace here." 39 + Some 40 + "Run 'monopam verse init --handle <your-handle>' to create a workspace \ 41 + here." 
35 42 36 43 let pp_error_with_hint ppf e = 37 44 pp_error ppf e; ··· 69 76 70 77 let pp_status ppf s = 71 78 Fmt.pf ppf "@[<v>Workspace: %a@,Registry: %s@,Members:@, @[<v>%a@]@]" 72 - Fpath.pp (Verse_config.root s.config) 79 + Fpath.pp 80 + (Verse_config.root s.config) 73 81 s.registry.name 74 82 Fmt.(list ~sep:cut pp_member_status) 75 83 s.tracked_members ··· 103 111 let eio_path = Eio.Path.(fs / Fpath.to_string verse_path) in 104 112 try 105 113 Eio.Path.read_dir eio_path 106 - |> List.filter (fun name -> 107 - is_directory ~fs Fpath.(verse_path / name)) 114 + |> List.filter (fun name -> is_directory ~fs Fpath.(verse_path / name)) 108 115 with Eio.Io _ -> [] 109 116 110 117 let init ~proc ~fs ~root ~handle () = ··· 126 133 (* Ensure the directory exists first so realpath works *) 127 134 (try Eio.Path.mkdirs ~perm:0o755 eio_path with Eio.Io _ -> ()); 128 135 match Unix.realpath root_str with 129 - | abs_str -> (match Fpath.of_string abs_str with Ok p -> p | Error _ -> root) 136 + | abs_str -> ( 137 + match Fpath.of_string abs_str with Ok p -> p | Error _ -> root) 130 138 | exception _ -> root 131 139 in 132 140 Logs.info (fun m -> m "Workspace root: %a" Fpath.pp root); ··· 138 146 | Error msg -> 139 147 Logs.err (fun m -> m "Registry clone failed: %s" msg); 140 148 Error (Registry_error msg) 141 - | Ok registry -> 149 + | Ok registry -> ( 142 150 Logs.info (fun m -> m "Registry loaded"); 143 151 (* Look up user in registry - this validates the handle *) 144 152 match Verse_registry.find_member registry ~handle with 145 153 | None -> 146 154 Logs.err (fun m -> m "Handle %s not found in registry" handle); 147 155 Error (Member_not_found handle) 148 - | Some member -> 149 - Logs.info (fun m -> m "Found member: mono=%s opam=%s" member.monorepo member.opamrepo); 150 - (* Create workspace directories *) 151 - Logs.info (fun m -> m "Creating workspace directories..."); 152 - ensure_dir ~fs root; 153 - ensure_dir ~fs (Verse_config.src_path config); 154 - ensure_dir ~fs 
(Verse_config.verse_path config); 155 - (* Clone user's monorepo *) 156 - let mono_path = Verse_config.mono_path config in 157 - Logs.info (fun m -> m "Cloning monorepo to %a" Fpath.pp mono_path); 158 - let mono_url = Uri.of_string member.monorepo in 159 - (match Git.clone ~proc ~fs ~url:mono_url ~branch:Verse_config.default_branch mono_path with 156 + | Some member -> ( 157 + Logs.info (fun m -> 158 + m "Found member: mono=%s opam=%s" member.monorepo 159 + member.opamrepo); 160 + (* Create workspace directories *) 161 + Logs.info (fun m -> m "Creating workspace directories..."); 162 + ensure_dir ~fs root; 163 + ensure_dir ~fs (Verse_config.src_path config); 164 + ensure_dir ~fs (Verse_config.verse_path config); 165 + (* Clone user's monorepo *) 166 + let mono_path = Verse_config.mono_path config in 167 + Logs.info (fun m -> m "Cloning monorepo to %a" Fpath.pp mono_path); 168 + let mono_url = Uri.of_string member.monorepo in 169 + match 170 + Git.clone ~proc ~fs ~url:mono_url 171 + ~branch:Verse_config.default_branch mono_path 172 + with 173 + | Error e -> 174 + Logs.err (fun m -> m "Monorepo clone failed: %a" Git.pp_error e); 175 + Error (Git_error e) 176 + | Ok () -> ( 177 + Logs.info (fun m -> m "Monorepo cloned"); 178 + (* Clone user's opam repo *) 179 + let opam_path = Verse_config.opam_repo_path config in 180 + Logs.info (fun m -> 181 + m "Cloning opam repo to %a" Fpath.pp opam_path); 182 + let opam_url = Uri.of_string member.opamrepo in 183 + match 184 + Git.clone ~proc ~fs ~url:opam_url 185 + ~branch:Verse_config.default_branch opam_path 186 + with 160 187 | Error e -> 161 - Logs.err (fun m -> m "Monorepo clone failed: %a" Git.pp_error e); 188 + Logs.err (fun m -> 189 + m "Opam repo clone failed: %a" Git.pp_error e); 162 190 Error (Git_error e) 163 - | Ok () -> 164 - Logs.info (fun m -> m "Monorepo cloned"); 165 - (* Clone user's opam repo *) 166 - let opam_path = Verse_config.opam_repo_path config in 167 - Logs.info (fun m -> m "Cloning opam repo to %a" 
Fpath.pp opam_path); 168 - let opam_url = Uri.of_string member.opamrepo in 169 - (match Git.clone ~proc ~fs ~url:opam_url ~branch:Verse_config.default_branch opam_path with 170 - | Error e -> 171 - Logs.err (fun m -> m "Opam repo clone failed: %a" Git.pp_error e); 172 - Error (Git_error e) 191 + | Ok () -> ( 192 + Logs.info (fun m -> m "Opam repo cloned"); 193 + (* Save config to XDG *) 194 + Logs.info (fun m -> 195 + m "Saving config to %a" Fpath.pp config_file); 196 + match Verse_config.save ~fs config with 197 + | Error msg -> 198 + Logs.err (fun m -> m "Failed to save config: %s" msg); 199 + Error (Config_error msg) 173 200 | Ok () -> 174 - Logs.info (fun m -> m "Opam repo cloned"); 175 - (* Save config to XDG *) 176 - Logs.info (fun m -> m "Saving config to %a" Fpath.pp config_file); 177 - (match Verse_config.save ~fs config with 178 - | Error msg -> 179 - Logs.err (fun m -> m "Failed to save config: %s" msg); 180 - Error (Config_error msg) 181 - | Ok () -> 182 - Logs.info (fun m -> m "Workspace initialized successfully"); 183 - Ok ()))) 201 + Logs.info (fun m -> 202 + m "Workspace initialized successfully"); 203 + Ok ())))) 184 204 185 205 let status ~proc ~fs ~config () = 186 206 (* Load registry *) ··· 197 217 match Verse_registry.find_member registry ~handle with 198 218 | None -> 199 219 (* Member not in registry but locally tracked - show anyway *) 200 - let local_path = Fpath.(Verse_config.verse_path config / handle) in 220 + let local_path = 221 + Fpath.(Verse_config.verse_path config / handle) 222 + in 201 223 let cloned = is_directory ~fs local_path in 202 224 Some 203 225 { ··· 242 264 | Error msg -> Error (Registry_error msg) 243 265 | Ok registry -> Ok registry.members 244 266 245 - 246 - (** Clone or pull a single git repo. Returns Ok true if cloned, Ok false if pulled. *) 267 + (** Clone or pull a single git repo. Returns Ok true if cloned, Ok false if 268 + pulled. 
*) 247 269 let clone_or_pull_repo ~proc ~fs ~url ~branch path = 248 270 if Git.is_repo ~proc ~fs path then begin 249 - match Git.pull ~proc ~fs path with 250 - | Error e -> Error e 251 - | Ok () -> Ok false 271 + match Git.pull ~proc ~fs path with Error e -> Error e | Ok () -> Ok false 252 272 end 253 273 else begin 254 274 let url = Uri.of_string url in ··· 262 282 match Verse_registry.clone_or_pull ~proc ~fs ~config () with 263 283 | Error msg -> Error (Registry_error msg) 264 284 | Ok registry -> 265 - let members = match handle with 266 - | Some h -> 267 - (match Verse_registry.find_member registry ~handle:h with 268 - | Some m -> [m] 285 + let members = 286 + match handle with 287 + | Some h -> ( 288 + match Verse_registry.find_member registry ~handle:h with 289 + | Some m -> [ m ] 269 290 | None -> []) 270 291 | None -> registry.members 271 292 in ··· 287 308 clone_or_pull_repo ~proc ~fs ~url:member.monorepo 288 309 ~branch:Verse_config.default_branch mono_path 289 310 in 290 - let mono_err = match mono_result with 291 - | Ok true -> Logs.info (fun m -> m " Cloned %s monorepo" h); None 292 - | Ok false -> Logs.info (fun m -> m " Pulled %s monorepo" h); None 311 + let mono_err = 312 + match mono_result with 313 + | Ok true -> 314 + Logs.info (fun m -> m " Cloned %s monorepo" h); 315 + None 316 + | Ok false -> 317 + Logs.info (fun m -> m " Pulled %s monorepo" h); 318 + None 293 319 | Error e -> 294 - Logs.warn (fun m -> m " Failed %s monorepo: %a" h Git.pp_error e); 320 + Logs.warn (fun m -> 321 + m " Failed %s monorepo: %a" h Git.pp_error e); 295 322 Some (Fmt.str "%s monorepo: %a" h Git.pp_error e) 296 323 in 297 324 (* Clone or pull opam repo *) ··· 300 327 clone_or_pull_repo ~proc ~fs ~url:member.opamrepo 301 328 ~branch:Verse_config.default_branch opam_path 302 329 in 303 - let opam_err = match opam_result with 304 - | Ok true -> Logs.info (fun m -> m " Cloned %s opam repo" h); None 305 - | Ok false -> Logs.info (fun m -> m " Pulled %s opam repo" h); None 
330 + let opam_err = 331 + match opam_result with 332 + | Ok true -> 333 + Logs.info (fun m -> m " Cloned %s opam repo" h); 334 + None 335 + | Ok false -> 336 + Logs.info (fun m -> m " Pulled %s opam repo" h); 337 + None 306 338 | Error e -> 307 - Logs.warn (fun m -> m " Failed %s opam repo: %a" h Git.pp_error e); 339 + Logs.warn (fun m -> 340 + m " Failed %s opam repo: %a" h Git.pp_error e); 308 341 Some (Fmt.str "%s opam: %a" h Git.pp_error e) 309 342 in 310 343 match (mono_err, opam_err) with ··· 321 354 (* pull already updates registry and syncs all members *) 322 355 pull ~proc ~fs ~config () 323 356 324 - (** Scan a monorepo for subtree directories. 325 - Returns a list of directory names that look like subtrees (have commits). *) 357 + (** Scan a monorepo for subtree directories. Returns a list of directory names 358 + that look like subtrees (have commits). *) 326 359 let scan_subtrees ~proc ~fs monorepo_path = 327 360 if not (Git.is_repo ~proc ~fs monorepo_path) then [] 328 361 else ··· 330 363 try 331 364 Eio.Path.read_dir eio_path 332 365 |> List.filter (fun name -> 333 - (* Skip hidden dirs and common non-subtree dirs *) 334 - not (String.starts_with ~prefix:"." name) 335 - && name <> "_build" 336 - && name <> "node_modules" 337 - && is_directory ~fs Fpath.(monorepo_path / name)) 366 + (* Skip hidden dirs and common non-subtree dirs *) 367 + (not (String.starts_with ~prefix:"." name)) 368 + && name <> "_build" && name <> "node_modules" 369 + && is_directory ~fs Fpath.(monorepo_path / name)) 338 370 with Eio.Io _ -> [] 339 371 340 - (** Get subtrees from all tracked verse members. 341 - Returns a map from subtree name to list of (handle, monorepo_path) pairs. *) 372 + (** Get subtrees from all tracked verse members. Returns a map from subtree name 373 + to list of (handle, monorepo_path) pairs. 
*) 342 374 let get_verse_subtrees ~proc ~fs ~config () = 343 375 let verse_path = Verse_config.verse_path config in 344 376 let tracked_handles = get_tracked_handles ~fs config in ··· 354 386 let existing = 355 387 try Hashtbl.find subtree_map subtree with Not_found -> [] 356 388 in 357 - Hashtbl.replace subtree_map subtree ((handle, member_mono) :: existing)) 389 + Hashtbl.replace subtree_map subtree 390 + ((handle, member_mono) :: existing)) 358 391 subtrees 359 392 end) 360 393 tracked_handles;
+10 -7
lib/verse.mli
··· 1 1 (** Monoverse operations. 2 2 3 - Federated monorepo collaboration. Members are identified by handles 4 - and validated against the registry. *) 3 + Federated monorepo collaboration. Members are identified by handles and 4 + validated against the registry. *) 5 5 6 6 (** {1 Error Types} *) 7 7 ··· 17 17 (** [pp_error] formats errors. *) 18 18 19 19 val pp_error_with_hint : error Fmt.t 20 - (** [pp_error_with_hint] formats errors with a helpful hint for resolving them. *) 20 + (** [pp_error_with_hint] formats errors with a helpful hint for resolving them. 21 + *) 21 22 22 23 val error_hint : error -> string option 23 24 (** [error_hint e] returns a hint string for the given error, if available. *) ··· 30 31 local_path : Fpath.t; (** Local path under verse/ *) 31 32 cloned : bool; (** Whether the monorepo is cloned locally *) 32 33 clean : bool option; (** Whether the clone is clean (None if not cloned) *) 33 - ahead_behind : Git.ahead_behind option; (** Ahead/behind status (None if not cloned) *) 34 + ahead_behind : Git.ahead_behind option; 35 + (** Ahead/behind status (None if not cloned) *) 34 36 } 35 37 (** Status of a member's monorepo in the workspace. *) 36 38 ··· 101 103 (unit, error) result 102 104 (** [pull ~proc ~fs ~config ?handle ()] syncs all registry members. 103 105 104 - For each member in the registry, clones or pulls both their monorepo 105 - (to [verse/<handle>/]) and their opam repo (to [verse/<handle>-opam/]). 106 + For each member in the registry, clones or pulls both their monorepo (to 107 + [verse/<handle>/]) and their opam repo (to [verse/<handle>-opam/]). 106 108 107 109 If [handle] is specified, only syncs that specific member. 108 110 ··· 137 139 unit -> 138 140 (string, (string * Fpath.t) list) Hashtbl.t 139 141 (** [get_verse_subtrees ~proc ~fs ~config ()] scans all tracked verse members 140 - and returns a map from subtree name to list of (handle, monorepo_path) pairs. 
142 + and returns a map from subtree name to list of (handle, monorepo_path) 143 + pairs. 141 144 142 145 This allows finding which verse users have a particular repo. *)
+8 -17
lib/verse_config.ml
··· 1 1 let app_name = "monopam" 2 2 3 3 (* Simplified config: just root and handle. Paths are hardcoded. *) 4 - type t = { 5 - root : Fpath.t; 6 - handle : string; 7 - } 4 + type t = { root : Fpath.t; handle : string } 8 5 9 6 let root t = t.root 10 7 let handle t = t.handle ··· 20 17 let xdg_config_home () = 21 18 match Sys.getenv_opt "XDG_CONFIG_HOME" with 22 19 | Some dir when dir <> "" -> Fpath.v dir 23 - | _ -> 20 + | _ -> ( 24 21 match Sys.getenv_opt "HOME" with 25 22 | Some home -> Fpath.(v home / ".config") 26 - | None -> Fpath.v "/tmp" 23 + | None -> Fpath.v "/tmp") 27 24 28 25 let xdg_data_home () = 29 26 match Sys.getenv_opt "XDG_DATA_HOME" with 30 27 | Some dir when dir <> "" -> Fpath.v dir 31 - | _ -> 28 + | _ -> ( 32 29 match Sys.getenv_opt "HOME" with 33 30 | Some home -> Fpath.(v home / ".local" / "share") 34 - | None -> Fpath.v "/tmp" 31 + | None -> Fpath.v "/tmp") 35 32 36 33 let config_dir () = Fpath.(xdg_config_home () / app_name) 37 34 let data_dir () = Fpath.(xdg_data_home () / app_name) 38 35 let config_file () = Fpath.(config_dir () / "opamverse.toml") 39 36 let registry_path () = Fpath.(data_dir () / "opamverse-registry") 40 - 41 37 let create ~root ~handle () = { root; handle } 42 38 43 39 let expand_tilde s = ··· 94 90 let load ~fs () = 95 91 let path = config_file () in 96 92 let path_str = Fpath.to_string path in 97 - try Ok (Tomlt_eio.decode_path_exn codec ~fs path_str) 98 - with 93 + try Ok (Tomlt_eio.decode_path_exn codec ~fs path_str) with 99 94 | Eio.Io _ as e -> Error (Printexc.to_string e) 100 95 | Failure msg -> Error (Fmt.str "Invalid config: %s" msg) 101 96 ··· 111 106 with Eio.Io _ as e -> Error (Printexc.to_string e) 112 107 113 108 let pp ppf t = 114 - Fmt.pf ppf 115 - "@[<v>workspace:@,\ 116 - \ root: %a@,\ 117 - identity:@,\ 118 - \ handle: %s@]" 119 - Fpath.pp t.root t.handle 109 + Fmt.pf ppf "@[<v>workspace:@, root: %a@,identity:@, handle: %s@]" Fpath.pp 110 + t.root t.handle
+6 -4
lib/verse_config.mli
··· 3 3 Configuration is stored in the XDG config directory at 4 4 [~/.config/monopam/opamverse.toml]. 5 5 6 - The config stores just the workspace root and user's handle. 7 - All paths are derived from the root: 6 + The config stores just the workspace root and user's handle. All paths are 7 + derived from the root: 8 8 - [mono/] - user's monorepo 9 9 - [src/] - git checkouts for subtrees 10 10 - [opam-repo/] - opam overlay repository ··· 35 35 (** [src_path t] returns the path to git checkouts ([root/src/]). *) 36 36 37 37 val opam_repo_path : t -> Fpath.t 38 - (** [opam_repo_path t] returns the path to the opam overlay ([root/opam-repo/]). *) 38 + (** [opam_repo_path t] returns the path to the opam overlay ([root/opam-repo/]). 39 + *) 39 40 40 41 val verse_path : t -> Fpath.t 41 - (** [verse_path t] returns the path to tracked members' monorepos ([root/verse/]). *) 42 + (** [verse_path t] returns the path to tracked members' monorepos 43 + ([root/verse/]). *) 42 44 43 45 (** {1 XDG Paths} *) 44 46
+17 -13
lib/verse_registry.ml
··· 7 7 Fmt.pf ppf "@[<hov 2>%s ->@ mono:%s@ opam:%s@]" m.handle m.monorepo m.opamrepo 8 8 9 9 let pp ppf t = 10 - Fmt.pf ppf "@[<v>registry: %s@,members:@, @[<v>%a@]@]" 11 - t.name Fmt.(list ~sep:cut pp_member) t.members 10 + Fmt.pf ppf "@[<v>registry: %s@,members:@, @[<v>%a@]@]" t.name 11 + Fmt.(list ~sep:cut pp_member) 12 + t.members 12 13 13 14 (* TOML structure: 14 15 [registry] ··· 45 46 { name = registry.r_name; members = Option.value ~default:[] members }) 46 47 |> mem "registry" registry_info_codec ~enc:(fun t -> { r_name = t.name }) 47 48 |> opt_mem "members" (list member_codec) ~enc:(fun t -> 48 - match t.members with [] -> None | ms -> Some ms) 49 + match t.members with [] -> None | ms -> Some ms) 49 50 |> finish)) 50 51 51 52 let empty_registry = { name = "opamverse"; members = [] } ··· 55 56 Logs.info (fun m -> m "Loading registry from path: %s" path_str); 56 57 try 57 58 let registry = Tomlt_eio.decode_path_exn codec ~fs path_str in 58 - Logs.info (fun m -> m "Registry loaded: %d members" (List.length registry.members)); 59 + Logs.info (fun m -> 60 + m "Registry loaded: %d members" (List.length registry.members)); 59 61 Ok registry 60 62 with 61 63 | Eio.Io _ as e -> ··· 65 67 Logs.err (fun m -> m "Registry parse error: %s" msg); 66 68 Error (Fmt.str "Invalid registry: %s" msg) 67 69 | exn -> 68 - Logs.err (fun m -> m "Unexpected registry error: %s" (Printexc.to_string exn)); 70 + Logs.err (fun m -> 71 + m "Unexpected registry error: %s" (Printexc.to_string exn)); 69 72 Error (Fmt.str "Registry error: %s" (Printexc.to_string exn)) 70 73 71 74 let save ~fs path registry = ··· 91 94 Logs.info (fun m -> m "Registry exists, pulling updates..."); 92 95 (* Pull updates, but don't fail if pull fails *) 93 96 (match Git.pull ~proc ~fs registry_path with 94 - | Ok () -> Logs.info (fun m -> m "Registry pull succeeded") 95 - | Error e -> Logs.warn (fun m -> m "Registry pull failed: %a (using cached)" Git.pp_error e)); 97 + | Ok () -> Logs.info (fun m -> m 
"Registry pull succeeded") 98 + | Error e -> 99 + Logs.warn (fun m -> 100 + m "Registry pull failed: %a (using cached)" Git.pp_error e)); 96 101 Logs.info (fun m -> m "Loading registry from %a" Fpath.pp registry_toml); 97 102 load ~fs registry_toml 98 103 end ··· 117 122 (try Eio.Path.mkdirs ~perm:0o755 registry_eio with Eio.Io _ -> ()); 118 123 (* Initialize as git repo *) 119 124 (match Git.init ~proc ~fs registry_path with 120 - | Ok () -> () 121 - | Error _ -> ()); 125 + | Ok () -> () 126 + | Error _ -> ()); 122 127 (* Create empty registry file *) 123 128 (match save ~fs registry_toml empty_registry with 124 - | Ok () -> () 125 - | Error _ -> ()); 129 + | Ok () -> () 130 + | Error _ -> ()); 126 131 Ok empty_registry 127 132 end 128 133 129 - let find_member t ~handle = 130 - List.find_opt (fun m -> m.handle = handle) t.members 134 + let find_member t ~handle = List.find_opt (fun m -> m.handle = handle) t.members 131 135 132 136 let find_members t ~handles = 133 137 List.filter (fun m -> List.mem m.handle handles) t.members
+2 -2
lib/verse_registry.mli
··· 29 29 config:Verse_config.t -> 30 30 unit -> 31 31 (t, string) result 32 - (** [clone_or_pull ~proc ~fs ~config ()] clones the registry if not present, 33 - or pulls updates if it exists. Returns the parsed registry contents. 32 + (** [clone_or_pull ~proc ~fs ~config ()] clones the registry if not present, or 33 + pulls updates if it exists. Returns the parsed registry contents. 34 34 35 35 The registry is cloned to [config.registry_path]. 36 36