# Makefile for running health-checks on all available OPAM packages in parallel
# Usage: make -j<N> all (where N is the number of parallel jobs)
# make OUTPUT_DIR=/path/to/output all (to specify custom output directory)
# make OPAM_REPO=/path/to/packages all (to specify custom opam repository)
# make clean (to remove generated JSON result directories)

# OS target
SYSTEM := debian-12

# Compiler versions - can be overridden on command line
#COMPILERS := ocaml.4.08.1 ocaml.4.09.1 ocaml.4.10.2 ocaml.4.11.2 ocaml.4.12.1 ocaml.4.13.1 ocaml.4.14.2 ocaml.5.0.0 ocaml.5.1.1 ocaml.5.2.1 ocaml.5.3.0
#COMPILERS := ocaml.4.08.2 ocaml.4.09.2 ocaml.4.10.3 ocaml.4.11.3 ocaml.4.12.2 ocaml.4.13.2 ocaml.4.14.3 ocaml.5.0.1 ocaml.5.1.2 ocaml.5.2.2 ocaml.5.3.1
COMPILERS := ocaml-base-compiler.5.4.0~beta2

# Output directory - can be overridden on command line: make OUTPUT_DIR=/path/to/output
#OUTPUT_DIR := relocatable
OUTPUT_DIR := output

# Path to the opam repository root (for git operations) - can be overridden
OPAM_REPO := /home/mtelvers/opam-repository

# Cache directory - can be overridden on command line: make CACHE_DIR=/path/to/cache
CACHE_DIR := /home/mtelvers/cache2

# Get the git commit SHA of the opam repository; falls back to "unknown"
# when OPAM_REPO is not a git checkout (output is then grouped under
# an "unknown" directory rather than failing the whole run).
OPAM_SHA := $(shell git -C "$(OPAM_REPO)" rev-parse HEAD 2>/dev/null || echo "unknown")

# Get the list of packages from opam (requires the day10 tool to be built first)
PACKAGES := $(shell ./_build/install/default/bin/day10 list --opam-repository "$(OPAM_REPO)")
# PACKAGES := 0install.2.18 diffast-api.0.2 alcotest.1.9.0 bos.0.2.1 ansi.0.7.0

# --opam-repository /home/mtelvers/opam-repository-relocatable \
# Template to generate rules for each compiler version.
# $(1) is the compiler name; the generated pattern rule builds one JSON
# health-check result per package under OUTPUT_DIR/SHA/SYSTEM/<compiler>/.
# The cache directory is an order-only prerequisite: it must exist, but
# its timestamp must not trigger rebuilds.
define COMPILER_TEMPLATE
$$(OUTPUT_DIR)/$$(OPAM_SHA)/$$(SYSTEM)/$(1)/%.json: | $$(CACHE_DIR)
	@mkdir -p $$(@D)
	./_build/install/default/bin/day10 health-check \
		--cache-dir "$$(CACHE_DIR)" \
		--opam-repository "$$(OPAM_REPO)" \
		--ocaml-version $(1) \
		--json $$@ $$(basename $$(notdir $$@))
endef

# Instantiate the pattern rule once per compiler
$(foreach compiler,$(COMPILERS),$(eval $(call COMPILER_TEMPLATE,$(compiler))))
# Generate all targets for all compiler/package combinations.
# Ordered by package first, then compiler, so that under `make -j` the
# parallel jobs are spread across packages rather than clustered on one
# compiler at a time (better resource distribution).
TARGETS := $(foreach package,$(PACKAGES),$(foreach compiler,$(COMPILERS),$(OUTPUT_DIR)/$(OPAM_SHA)/$(SYSTEM)/$(compiler)/$(package).json))

# Default target - depends on all package health-checks for all compilers
all: $(TARGETS)

$(CACHE_DIR):
	mkdir -p $@
# Build a JSON array describing every commit SHA that has a results
# directory under OUTPUT_DIR, looking each SHA up in the opam repository
# for its date and subject line. Written atomically via $@.tmp so an
# interrupted run never leaves a truncated commits.json.
# NUL (%x00) is used as the field separator because commit subjects may
# contain any other printable character.
$(OUTPUT_DIR)/commits.json:
	@echo "[]" > $@.tmp
	@for dir in $(OUTPUT_DIR)/*/; do \
	  if [ -d "$$dir" ]; then \
	    sha=$$(basename "$$dir"); \
	    echo "Processing SHA: $$sha"; \
	    git -C $(OPAM_REPO) show --pretty=format:'%H%x00%aI%x00%s%x00' -s "$$sha" 2>/dev/null | \
	    jq -R -s 'if . == "" then empty else split("\n")[0] | split("\u0000") | {"sha": .[0], "date": .[1], "message": .[2]} end' | \
	    jq -s 'if length > 0 then .[0] else {"sha": "'$$sha'", "date": null, "message": "Unknown commit"} end' > $@.entry && \
	    jq --slurpfile entry $@.entry '. += $$entry' $@.tmp > $@.tmp2 && \
	    mv $@.tmp2 $@.tmp; \
	    rm -f $@.entry; \
	  fi; \
	done
	@mv $@.tmp $@
	@echo "JSON file generated: $@"
# Flatten every per-package JSON result under one SHA directory into a
# single commit.json array, tagging each record with its os, compiler
# and sha (recovered from the directory layout SHA/os/compiler/pkg.json).
$(OUTPUT_DIR)/%/commit.json:
	@echo "Generating flattened $@"
	@{ \
	  sha=$$(basename $(@D)); \
	  for os_dir in $(@D)/*/; do \
	    if [ -d "$$os_dir" ]; then \
	      os=$$(basename "$$os_dir"); \
	      for compiler_dir in "$$os_dir"*/; do \
	        if [ -d "$$compiler_dir" ]; then \
	          compiler=$$(basename "$$compiler_dir"); \
	          json_files="$$compiler_dir"*.json; \
	          if ls $$json_files >/dev/null 2>&1; then \
	            cat $$json_files | jq --arg os "$$os" --arg compiler "$$compiler" --arg sha "$$sha" \
	              '. + {"os": $$os, "compiler": $$compiler, "sha": $$sha}'; \
	          fi; \
	        fi; \
	      done; \
	    fi; \
	  done; \
	} | jq -s '.' > $@

# Build the commit index plus a flattened commit.json per SHA directory.
# Fixed: honour $(OUTPUT_DIR) here; the previous hard-coded "output/*"
# silently produced no prerequisites when OUTPUT_DIR was overridden.
json: $(OUTPUT_DIR)/commits.json $(foreach dir,$(wildcard $(OUTPUT_DIR)/*),$(dir)/commit.json)
# Convert a flattened commit.json to Parquet using clickhouse-local
$(OUTPUT_DIR)/%/commit.parquet: $(OUTPUT_DIR)/%/commit.json
	@echo "Converting $< to Parquet format"
	clickhouse local --query "SELECT * FROM file('$<', 'JSONEachRow') INTO OUTFILE '$@' FORMAT Parquet"

# Flattened results including full build logs. This variant is heavier,
# so it is delegated to a Python helper that reads logs from the cache.
$(OUTPUT_DIR)/%/commit-with-logs.json:
	@echo "Generating flattened $@ with build logs using Python"
	python3 process_with_logs.py $(@D) --cache-dir $(CACHE_DIR) --output-json $@

$(OUTPUT_DIR)/%/commit-with-logs.parquet: $(OUTPUT_DIR)/%/commit-with-logs.json
	@echo "Converting $< to Parquet format"
	clickhouse local --query "SELECT * FROM file('$<', 'JSONEachRow') INTO OUTFILE '$@' FORMAT Parquet"

# Combined target to generate both JSON and Parquet with build logs
$(OUTPUT_DIR)/%/commit-with-logs: $(OUTPUT_DIR)/%/commit-with-logs.json $(OUTPUT_DIR)/%/commit-with-logs.parquet
	@echo "Generated both JSON and Parquet files with build logs for $(@D)"
# Publish layer.json and build.log files from the cache into
# OUTPUT_DIR/cache/<layer-dir>/, in parallel (one xargs job per CPU).
# cp -l creates hard links, avoiding a second copy of the data —
# NOTE(review): this assumes CACHE_DIR and OUTPUT_DIR are on the same
# filesystem; confirm before moving either to another mount.
copy:
	@find $(CACHE_DIR) -maxdepth 2 \( -name "layer.json" -o -name "build.log" \) -print0 | \
	xargs -0 -P $(shell nproc) -I {} sh -c 'd=$${1%/*}; d=$${d##*/}; mkdir -p $(OUTPUT_DIR)/cache/$$d; cp -l "$$1" $(OUTPUT_DIR)/cache/$$d/' _ {}
# Clean up json result directories for all compilers (current SHA/SYSTEM
# tree only). Guard against an empty OUTPUT_DIR override before rm -rf.
clean:
	$(if $(strip $(OUTPUT_DIR)),,$(error OUTPUT_DIR is empty - refusing to rm -rf))
	rm -rf $(foreach compiler,$(COMPILERS),$(OUTPUT_DIR)/$(OPAM_SHA)/$(SYSTEM)/$(compiler))

# Show the list of packages that will be processed for each compiler
list:
	@echo "Packages to process (from $(OPAM_REPO)/packages):"
	@$(foreach compiler,$(COMPILERS),echo "Compiler $(compiler): $(OUTPUT_DIR)/$(OPAM_SHA)/$(SYSTEM)/$(compiler)";)
	@echo "Packages:"
	@echo $(PACKAGES) | tr ' ' '\n'

# Count total packages across all compilers
count:
	@echo "Total packages per compiler: $(words $(PACKAGES))"
	@echo "Total compilers: $(words $(COMPILERS))"
	@echo "Total targets: $(words $(TARGETS))"
# Per-compiler convenience targets: `make <compiler>` builds every
# package's health-check JSON for just that compiler version.
$(foreach compiler,$(COMPILERS),$(eval $(compiler): $(addprefix $(OUTPUT_DIR)/$(OPAM_SHA)/$(SYSTEM)/$(compiler)/, $(addsuffix .json, $(PACKAGES)))))

# Advance OPAM_REPO to the next merge commit on upstream/master after
# the current checkout, one merge at a time (oldest first via --reverse).
next:
	git -C $(OPAM_REPO) fetch --all
	next_merge=$$(git -C $(OPAM_REPO) log --merges --format="%H" --reverse HEAD..upstream/master | head -1); \
	if [ -z "$$next_merge" ]; then \
	  echo "No merge commits found ahead of current position in upstream/master"; \
	  exit 1; \
	fi; \
	echo "Moving to next merge commit: $$next_merge"; \
	git -C $(OPAM_REPO) log --oneline -1 $$next_merge; \
	git -C $(OPAM_REPO) checkout $$next_merge
parquet: $(foreach dir,$(wildcard $(OUTPUT_DIR)/*),$(dir)/commit.parquet)

# Declare all command-style targets phony so that same-named files can
# never shadow them. Fixed: json, copy and next were previously missing,
# so e.g. a file called "next" would have made `make next` a no-op.
.PHONY: all clean list count parquet json copy next $(COMPILERS)
+122
day10/README.md
···11+22+```
33+./_build/install/default/bin/day10 health-check --cache-dir /home/mtelvers/cache/ --opam-repository /home/mtelvers/opam-repository 0install.2.18
44+./_build/install/default/bin/day10 health-check --cache-dir /home/mtelvers/cache/ --opam-repository /home/mtelvers/opam-repository obuilder.0.6.0
55+./_build/install/default/bin/day10 health-check --cache-dir /home/mtelvers/cache/ --opam-repository /home/mtelvers/opam-repository cohttp.6.1.0
66+./_build/install/default/bin/day10 health-check --cache-dir /home/mtelvers/cache/ --opam-repository /home/mtelvers/opam-repository odoc.3.0.0
77+```
88+99+```
1010+./_build/install/default/bin/day10 ci --cache-dir /home/mtelvers/cache/ --opam-repository /home/mtelvers/opam-repository /home/mtelvers/day10
1111+```
# Windows
1515+1616+Remove Windows Defender
1717+1818+```
1919+dism /online /disable-feature /featurename:Windows-Defender /remove /norestart
2020+```
2121+2222+Install OpenSSH and configure (Windows Server 2022 only)
2323+2424+```
2525+curl.exe -L https://github.com/PowerShell/Win32-OpenSSH/releases/download/v9.2.2.0p1-Beta/OpenSSH-Win64-v9.2.2.0.msi -o openssh-win64.msi
2626+start /wait msiexec /q /norestart /i openssh-win64.msi
2727+copy id_ed25519.pub c:\programdata\ssh\administrators_authorized_keys
2828+netsh advfirewall firewall set rule name="OpenSSH SSH Server Preview (sshd)" new profile=any enable=yes
2929+```
3030+3131+On Windows Server 2025, SSHD is already installed, but not enabled.
3232+3333+```
3434+sc config sshd start=auto
3535+net start sshd
3636+copy id_ed25519.pub c:\programdata\ssh\administrators_authorized_keys
3737+netsh advfirewall firewall set rule name="OpenSSH SSH Server (sshd)" new profile=any enable=yes
3838+```
3939+4040+Install Git and ensure you restart your shell before continuing.
4141+4242+```
4343+curl.exe -L https://github.com/git-for-windows/git/releases/download/v2.50.0.windows.1/Git-2.50.0-64-bit.exe -o c:\windows\temp\git.exe
4444+start /wait c:\windows\temp\git.exe /VERYSILENT /NORESTART /NOCANCEL /SP- /CLOSEAPPLICATIONS /RESTARTAPPLICATIONS /TASKS="addtopath"
4545+```
Install Containerd. On the last line, select `ltsc2025` if using Windows Server 2025.
4848+4949+```
5050+curl.exe https://raw.githubusercontent.com/microsoft/Windows-Containers/refs/heads/Main/helpful_tools/Install-ContainerdRuntime/install-containerd-runtime.ps1 -o install-containerd-runtime.ps1
5151+Set-ExecutionPolicy Bypass
5252+.\install-containerd-runtime.ps1 -ContainerDVersion 2.1.3 -WinCNIVersion 0.3.1 -ExternalNetAdapter Ethernet -ContainerBaseImage mcr.microsoft.com/windows/servercore:ltsc2022
5353+```
5454+5555+Create `C:\Program Files\containerd\cni\conf\0-containerd-nat.conf` containing
5656+5757+```
5858+{
5959+ "cniVersion": "0.3.0",
6060+ "name": "nat",
6161+ "type": "nat",
6262+ "master": "Ethernet",
6363+ "ipam": {
6464+ "subnet": "172.20.0.0/16",
6565+ "routes": [
6666+ {
6767+ "gateway": "172.20.0.1"
6868+ }
6969+ ]
7070+ },
7171+ "capabilities": {
7272+ "portMappings": true,
7373+ "dns": true
7474+ }
7575+}
7676+```
7777+7878+Install opam
7979+8080+```
8181+curl.exe -L https://github.com/ocaml/opam/releases/download/2.3.0/opam-2.3.0-x86_64-windows.exe -o c:\windows\opam.exe
8282+opam init -y
8383+```
8484+8585+Download and build mtelvers/hcn-namespace
8686+8787+```
8888+git clone https://github.com/mtelvers/hcn-namespace
8989+cd hcn-namespace
9090+opam install . --deps-only
9191+for /f "tokens=*" %i in ('opam env') do @%i
9292+dune build
9393+copy _build\install\default\bin\hcn-namespace.exe %LocalAppData%\opam\.cygwin\root\usr\local\bin
9494+```
9595+9696+Build this project
9797+9898+```
9999+git clone https://github.com/mtelvers/ohc -b tool
100100+cd ohc
101101+opam install . --deps-only
102102+dune build
103103+```
104104+105105+Run
106106+107107+```
git clone https://github.com/ocaml/opam-repository c:\opam-repository
109109+mkdir c:\cache
110110+make -j 6 SYSTEM=windows-x86_64 OUTPUT_DIR=./output CACHE_DIR=c:\\cache OPAM_REPO=c:\\opam-repository all
111111+```
Next commit
117117+118118+```
119119+NEXT_MERGE=$(git rev-list --merges --reverse HEAD..upstream/master | head -1)
120120+git checkout $NEXT_MERGE
121121+```
122122+
+475
day10/analysis/REPORT.md
···11+# Universe Compatibility Solver: Algorithm Analysis Report
22+33+## Problem Statement
44+55+Given a collection of pre-solved package dependency "universes" — where each
66+universe is the complete transitive dependency solution for one version of one
77+package — find a compatible subset of universes covering a set of desired
88+packages. Two universes are **compatible** if every package that appears in both
99+is at the same version.
1010+1111+### Formal Definition
1212+1313+- A **universe** `U` maps package names to versions: `U.deps : name -> version`
1414+- `U.target` is the package that `U` was solved for
1515+- Given desired packages `{p1, ..., pk}`, find universes `{U1, ..., Uk}` where:
1616+ - `Ui.target.name = pi` for all `i`
1717+ - For all `i,j` and all package names `n` in both `Ui.deps` and `Uj.deps`:
1818+ `Ui.deps(n) = Uj.deps(n)`
1919+2020+### Complexity
2121+2222+This is a **Constraint Satisfaction Problem (CSP)**. In the worst case, it
2323+reduces to finding a k-clique in a compatibility graph, which is NP-hard.
2424+However, real package ecosystems have exploitable structure:
2525+2626+- The number of desired packages `k` is typically small (2-10)
2727+- Version conflicts cluster around a few "pivotal" packages (especially OCaml
2828+ compiler versions)
2929+- Most package pairs have no shared dependencies and are trivially compatible
3030+3131+## Algorithms Implemented
3232+3333+Six algorithms were implemented in OCaml (see `universe_compat.ml`):
3434+3535+### 1. Brute Force
3636+Enumerate all combinations of candidate universes (one per desired package).
3737+For each combination, check pairwise compatibility by incrementally merging
3838+dependency maps.
3939+4040+- **Complexity:** O(V1 * V2 * ... * Vk * D) where Vi = candidates for package
4141+ i, D = average deps size
4242+- **Strength:** Minimal overhead, fastest for small domains
4343+- **Weakness:** Exponential in k; no pruning
4444+4545+### 2. Backtracking with Forward Checking (Backtrack+FC)
4646+Standard CSP solver: assign one universe at a time, maintaining a merged
4747+dependency map. After each assignment, prune remaining candidate domains by
4848+removing any candidate that conflicts with the current merged state. Backtrack
4949+immediately if any domain becomes empty.
5050+5151+- **Complexity:** Same worst case as brute force, but prunes aggressively
5252+- **Strength:** Excellent pruning for structured problems; detects dead-ends early
5353+- **Weakness:** More per-node overhead than brute force
5454+5555+### 3. AC-3 + Backtracking
5656+Pre-process all domains using the AC-3 arc consistency algorithm before
5757+backtracking. AC-3 iteratively removes a candidate from domain `i` if no
5858+candidate in domain `j` is compatible with it, repeating until stable. Then
5959+runs Backtrack+FC on the reduced domains.
6060+6161+- **Complexity:** O(e * d^3) for AC-3 preprocessing where e = constraint edges,
6262+ d = max domain size, plus backtracking
6363+- **Strength:** Can detect impossibility before search begins
6464+- **Weakness:** Quadratic preprocessing cost; builds pairwise compatibility
6565+ matrices that are expensive for large domains
6666+6767+### 4. Greedy with Minimum Remaining Values (Greedy+MRV)
6868+Sort desired packages by domain size ascending (most constrained first — the
6969+MRV heuristic from CSP literature). Then run Backtrack+FC in this order.
7070+7171+- **Complexity:** Same as Backtrack+FC but with better variable ordering
7272+- **Strength:** Processing the most constrained variable first prunes more of
7373+ the search tree
7474+- **Weakness:** Sorting overhead; MRV ordering is a heuristic, not always optimal
7575+7676+### 5. Signature-Based Clustering
7777+Identify "pivotal" packages — those that appear across multiple desired
7878+packages' candidate universes with differing versions. Compute a signature for
7979+each universe based only on its pivotal dependency versions. Group candidates by
8080+signature, then search signature groups instead of individual candidates.
8181+8282+- **Complexity:** Depends on the number of distinct signatures; best case
8383+ collapses exponential search to linear
8484+- **Strength:** Exploits the structure of real package ecosystems where OCaml
8585+ version dominates compatibility
8686+- **Weakness:** Signature computation and grouping overhead; less effective when
8787+ pivotal set is large
8888+8989+### 6. Dependency Fingerprint Hashing
9090+For each pair of desired packages, compute the set of shared dependency names.
9191+Hash each candidate universe on just its shared dependencies. Use these
9292+fingerprints for fast compatibility filtering. Falls back to full compatibility
9393+checking within matching fingerprint groups.
9494+9595+- **Complexity:** O(k^2 * V * D) preprocessing, then filtered backtracking
9696+- **Strength:** Theoretically good for high-overlap scenarios
9797+- **Weakness:** Significant preprocessing overhead; the filtering benefit is
9898+ eaten by the setup cost in practice
9999+100100+## Test Suite
101101+102102+### Correctness Tests (Phase 1)
103103+104104+| Test Case | Description | Expected | Result |
105105+|-----------|-------------|----------|--------|
106106+| Basic | a(v1=OCaml4.14, v2=OCaml5), b(v1=OCaml4.14); want {a,b} | Compatible: a.1, b.1 | PASS (all 6 agree) |
107107+| Extended: d+e | d(needs OCaml5+a.2), e(needs OCaml4.14+a.1); want {d,e} | Incompatible | PASS (all 6 agree) |
108108+| Extended: a+b | Same universe; want {a,b} | Compatible: a.1, b.1 | PASS (all 6 agree) |
109109+| Extended: a+b+c | c appears in both OCaml clusters; want {a,b,c} | Compatible: a.1, b.1, c.1 | PASS (all 6 agree) |
110110+| Impossible | x(shared=v1), y(shared=v2); want {x,y} | Incompatible | PASS (all 6 agree) |
111111+112112+All algorithms produce consistent results across all test cases.
113113+114114+### Benchmark Configurations (Phases 2-6)
115115+116116+Synthetic data generation creates realistic package ecosystems with:
117117+- Configurable number of packages, versions per package, shared dependency pool
118118+- OCaml version correlation (version selection biased by OCaml version to create
119119+ realistic clustering)
120120+- Variable dependency density
121121+122122+## Synthetic Timing Results
123123+124124+All times are averages per query in milliseconds, measured on this machine
125125+(Linux, OCaml 5.4.0, native code). Agreement was 100% across all trials.
126126+127127+### Summary Table
128128+129129+| Scenario | Universes | Desired | Brute | BT+FC | AC-3+BT | Greedy+MRV | Signature | Fingerprint |
130130+|----------|-----------|---------|-------|-------|---------|------------|-----------|-------------|
131131+| Tiny (10 pkg) | 48 | 2 | 0.002 | 0.003 | 0.008 | 0.003 | 0.024 | 0.032 |
132132+| Small (50 pkg) | 377 | 3 | 0.002 | 0.008 | 0.099 | 0.012 | 0.079 | 0.222 |
133133+| Medium (200 pkg) | 1,501 | 3 | 0.004 | 0.014 | 0.195 | 0.019 | 0.159 | 0.433 |
134134+| Medium (200 pkg) | 1,501 | 5 | 0.009 | 0.036 | 0.603 | 0.045 | 0.284 | 1.440 |
135135+| Large (500 pkg) | 5,954 | 3 | 0.007 | 0.039 | 0.731 | 0.053 | 0.435 | 1.348 |
136136+| Large (500 pkg) | 5,954 | 5 | 0.018 | 0.113 | 2.396 | 0.130 | 0.732 | 4.408 |
137137+| Large (500 pkg) | 5,954 | 8 | 0.039 | 0.274 | 6.591 | 0.290 | 1.201 | 12.470 |
138138+| XL (2000 pkg) | 23,917 | 3 | 0.013 | 0.061 | 1.195 | 0.073 | 0.817 | 2.569 |
139139+| XL (2000 pkg) | 23,917 | 5 | 0.030 | 0.153 | 3.829 | 0.178 | 1.530 | 8.674 |
140140+| XL (2000 pkg) | 23,917 | 10 | 0.140 | 0.539 | 16.696 | 0.577 | 3.131 | 38.275 |
141141+| Patho (20 ver) | 2,999 | 5 | 0.005 | 0.047 | 3.180 | 0.053 | 0.321 | 1.825 |
142142+| Patho (20 ver) | 2,999 | 10 | 0.030 | 0.355 | 40.310 | 0.389 | 1.877 | 22.658 |
143143+| Extreme overlap | 24,032 | 5 | 0.029 | 0.184 | 4.332 | 0.189 | 1.077 | 4.954 |
144144+145145+*(All values in milliseconds)*
146146+147147+### Scaling Analysis
148148+149149+**Brute Force** is consistently the fastest algorithm. This is initially
150150+surprising but explained by the problem structure:
151151+152152+1. **Small per-candidate domains:** Even with 2000 packages and 8 versions each,
153153+ we're only searching through candidates for the *desired* packages (typically
154154+ 8-16 universes per desired package), not all 24,000 universes.
155155+156156+2. **Early termination:** The brute force finds a solution (or proves
157157+ impossibility) quickly because:
158158+ - Compatible solutions tend to exist and be found early
159159+ - The incremental merged-map check provides implicit pruning (failing fast
160160+ on the first conflict)
161161+162162+3. **Minimal overhead:** No preprocessing, no data structure setup, no domain
163163+ copying — just a tight loop with map operations.
164164+165165+**Backtracking+FC and Greedy+MRV** are close seconds. The forward checking
166166+prunes dead-end branches but the overhead of copying and filtering domain arrays
167167+outweighs the savings for these problem sizes. The MRV heuristic provides
168168+marginal benefit.
169169+170170+**AC-3** is consistently 50-100x slower than brute force. The O(e * d^3)
171171+preprocessing to build pairwise compatibility matrices dominates. This would
172172+only pay off if the search tree were much deeper and wider — i.e., if brute
173173+force were actually exploring exponentially many combinations.
174174+175175+**Signature-Based Clustering** is 10-30x slower than brute force. The overhead
176176+of computing signatures, building hash tables, and grouping candidates doesn't
177177+pay off because the underlying search is already fast.
178178+179179+**Fingerprint Hashing** is the slowest algorithm, 100-300x slower than brute
180180+force. The O(k^2 * V * D) preprocessing to compute shared-name sets and
181181+fingerprint tables is prohibitively expensive relative to the actual search
182182+cost.
183183+184184+---
185185+186186+## Real-World Results
187187+188188+Two real-world runs were performed using `day10 batch --dry-run` against opam
189189+repository commit `54aaf73d7a`:
190190+191191+1. **Latest-only**: Latest version of each package (4,519 universes)
192192+2. **Full**: Every version of every package (18,388 universes)
193193+194194+### Latest-Only Run (4,519 universes)
195195+196196+| Metric | Value |
197197+|--------|-------|
198198+| Total universes | 4,519 |
199199+| Distinct packages | 4,483 |
200200+| Avg versions per package | 1.0 |
201201+| Dependencies per universe | min=1, median=20, avg=39.8, max=297 |
202202+203203+**Pairwise: 10,046,403 pairs in 23.0 s** — 57.2% compatible, 42.8% incompatible.
204204+205205+### Full Run (18,388 universes) — Every Version of Every Package
206206+207207+| Metric | Value |
208208+|--------|-------|
209209+| Total universes | 18,388 |
210210+| Distinct packages | 4,491 |
211211+| Avg versions per package | **4.1** |
212212+| Versions per package | min=1, median=3, **max=70** |
213213+| Dependencies per universe | min=1, median=21, avg=40.7, max=372 |
214214+| Opam repo versions attempted | 19,272 |
215215+| Solutions found | 18,388 |
216216+| Solve failures | 884 |
217217+218218+Top packages by version count: `ocaml-base-compiler` (70), `menhir` (50),
219219+`archetype` (47), `binaryen` (44), `ppx_irmin` (44), `coq` (40).
220220+221221+#### OCaml Version Distribution (Full Run)
222222+223223+| OCaml Version | Universes | Percentage |
224224+|---------------|-----------|------------|
225225+| 5.4.0 | 9,128 | 49.6% |
226226+| 4.14.2 | 4,338 | 23.6% |
227227+| 5.3.0 | 1,100 | 6.0% |
228228+| 5.2.1 | 657 | 3.6% |
229229+| 4.11.2 | 594 | 3.2% |
230230+| 4.09.1 | 469 | 2.6% |
231231+| 4.12.1 | 465 | 2.5% |
232232+| 5.0.0 | 342 | 1.9% |
233233+| 5.1.1 | 256 | 1.4% |
234234+| 4.13.1 | 167 | 0.9% |
235235+| 4.08.1 | 136 | 0.7% |
236236+| 4.10.2 | 101 | 0.5% |
237237+| Other | 49 | 0.3% |
238238+239239+With all versions solved, OCaml 5.4.0 drops to 49.6% (from 65.6%) and
240240+OCaml 4.14.2 grows to 23.6% (from 17.2%), because older package versions
241241+pull in older OCaml compilers.
242242+243243+### Exhaustive Pairwise Results (Full Run)
244244+245245+**10,082,295 pairs tested in 80.8 seconds** (0.008 ms per pair).
246246+247247+| Result | Count | Percentage |
248248+|--------|-------|------------|
249249+| Compatible | 6,420,071 | **63.7%** |
250250+| Incompatible | 3,662,224 | **36.3%** |
251251+252252+Having multiple versions per package **increased pairwise compatibility from
253253+57.2% to 63.7%** — a 6.5 percentage point improvement. This is because the
254254+solver now has more candidate universes per package and can find version
255255+combinations that agree on shared dependency versions.
256256+257257+### Conflict Analysis (Full Run)
258258+259259+A deeper analysis (see `conflict_analysis.ml`) classified every incompatible
260260+pair by checking all candidate universe pairs, not just the first:
261261+262262+| Category | Count | % of Incompatible |
263263+|----------|-------|-------------------|
264264+| **OCaml + other conflicts** | 3,070,853 | **83.9%** |
265265+| **Non-OCaml conflicts** | 591,371 | **16.1%** |
266266+| **OCaml-only conflicts** | 0 | **0.0%** |
267267+268268+**Zero incompatible pairs are caused by OCaml version alone.** In every single
269269+case where two packages are incompatible, even if OCaml version were magically
270270+ignored, other dependency version disagreements would still prevent
271271+compatibility. OCaml version is a *marker* of ecosystem divergence, not the
272272+root cause.
273273+274274+The 83.9% "OCaml + other" category represents packages from fundamentally
275275+different eras of the ecosystem — they don't share any candidate pair with
276276+the same OCaml version, and their entire dependency trees have drifted apart.
277277+278278+#### True Conflict-Causing Dependencies
279279+280280+When OCaml version is *not* the issue (591K pairs that share an OCaml version
281281+but are still incompatible), these are the top conflict-causing packages:
282282+283283+| Package | Pairs | Notes |
284284+|---------|-------|-------|
285285+| dune | 238,766 | Build system version differences |
286286+| sexplib0 | 204,754 | Jane Street S-expression library |
287287+| ppxlib | 157,049 | PPX preprocessing framework |
288288+| dune-configurator | 145,529 | Dune config detection |
289289+| lwt | 142,460 | Async library |
290290+| base | 124,542 | Jane Street's stdlib replacement |
291291+| cmdliner | 101,296 | CLI parsing library |
292292+| re | 96,053 | Regular expressions |
293293+| ppx_sexp_conv | 96,036 | Jane Street PPX |
294294+| stdio | 80,415 | Jane Street I/O |
295295+296296+The Jane Street PPX ecosystem (`sexplib0`, `ppxlib`, `ppx_sexp_conv`,
297297+`ppx_inline_test`, `ppx_compare`, etc.) and `dune` are the true drivers of
298298+within-OCaml-version incompatibility.
299299+300300+#### Best-Pair Analysis
301301+302302+For each incompatible pair, finding the candidate pair with the *fewest*
303303+conflicts gives the "minimum distance to compatibility":
304304+305305+| Dependency | Pairs (% of incompatible) | Notes |
306306+|------------|--------------------------|-------|
307307+| ocaml-base-compiler | 85.7% | Tracks OCaml version |
308308+| ocaml | 85.6% | Tracks OCaml version |
309309+| ocaml-config | 61.7% | Tracks OCaml version |
310310+| sexplib0 | 19.4% | True ecosystem split |
311311+| dune | 16.7% | True ecosystem split |
312312+| ppxlib | 16.1% | True ecosystem split |
313313+| ocaml-compiler-libs | 12.8% | Tracks OCaml version |
314314+| base | 8.9% | |
315315+| cmdliner | 8.6% | |
316316+| dune-configurator | 8.1% | |
317317+318318+This confirms that ~86% of incompatible pairs have no candidate combination
319319+sharing an OCaml major version. But for the 14% that do, `sexplib0`, `dune`,
320320+and `ppxlib` are the packages that block compatibility.
321321+322322+### Most/Least Compatible Packages (Full Run)
323323+324324+**196 packages (4.4%) are compatible with every other package** (up from 181).
325325+326326+**0 packages are compatible with nothing** — every package can be paired with
327327+at least one other.
328328+329329+The most incompatible packages (~92% incompatible with others) remain those
330330+locked to very old OCaml versions: `rescript-syntax`, `tezt-performance-regression`,
331331+`ast_generic`, `learn-ocaml`.
332332+333333+### Compatibility Rate Distribution (Full Run)
334334+335335+| Compatibility Range | Pkg Count (Full) | Pkg Count (Latest) | Change |
336336+|--------------------|-----------------|-------------------|--------|
337337+| 0-10% | 96 | 417 | -321 |
338338+| 10-20% | 412 | 612 | -200 |
339339+| 20-30% | 340 | 179 | +161 |
340340+| 30-40% | 191 | 1 | +190 |
341341+| 40-50% | 131 | 15 | +116 |
342342+| 50-60% | 81 | 122 | -41 |
343343+| 60-70% | 342 | 764 | -422 |
344344+| 70-80% | 1,947 | 2,036 | -89 |
345345+| 80-90% | 452 | 4 | +448 |
346346+| 90-100% | 303 | 152 | +151 |
347347+| 100% | 196 | 181 | +15 |
348348+349349+The distribution **smooths out dramatically** with multiple versions available.
350350+The 0-10% bucket shrank from 417 to 96 packages — many packages that were
351351+incompatible now have an older version that works. The 80-90% bucket exploded
352352+from 4 to 452. The ecosystem becomes much more interconnectable when you can
353353+pick different versions.
354354+355355+### N-Way Compatibility (Full Run, Sampled)
356356+357357+| Packages | Compatible (Full) | Compatible (Latest) | Avg Query Time |
358358+|----------|-------------------|--------------------| --------------|
359359+| 2 | **62.8%** | 58.5% | 0.010 ms |
360360+| 3 | **42.3%** | 36.5% | 0.020 ms |
361361+| 5 | **18.9%** | 16.6% | 0.058 ms |
362362+| 8 | **5.7%** | 4.5% | 0.238 ms |
363363+| 10 | **2.5%** | 2.1% | 0.607 ms |
364364+| 15 | **0.3%** | 0.3% | 9.4 ms |
365365+| 20 | ~0% | ~0% | 2.8 ms |
366366+| 30 | 0% | 0% | 4.2 ms |
367367+| 50 | 0% | 0% | 17.0 ms |
368368+369369+*(10,000 random samples for 2-10 packages; 5,000 for 15-20; 2,000 for 30; 1,000 for 50)*
370370+371371+Having multiple versions improves compatibility at every level, with the
372372+largest gains for 2-3 package queries. The N-way query times are still
373373+sub-millisecond up to 8 packages, and under 20ms even for 50 packages.
374374+375375+Note that the per-query time increased ~4x from the latest-only run (e.g.
376376+0.0023ms to 0.010ms for pairs). This is expected: with 4.1 versions per
377377+package on average, the solver explores ~4x more candidates. Still very fast.
378378+379379+The times for 15 and 20 packages show an interesting non-monotonicity (9.4ms
380380+vs 2.8ms). This is because at 15 packages, the solver frequently explores
381381+deeply before proving incompatibility, while at 20 packages it typically fails
382382+faster because the constraints are even more over-determined.
383383+384384+### Key Observation: Version Diversity Helps Significantly
385385+386386+The full-version deployment increased pairwise compatibility by 6.5 percentage
387387+points (57.2% -> 63.7%). The 0-10% incompatibility bucket shrank by 77%.
388388+This confirms that having **every version of every package** is valuable —
389389+it gives the compatibility solver more degrees of freedom to find working
390390+combinations by picking older versions that happen to share dependency versions.
391391+392392+---
393393+394394+## Key Insight
395395+396396+The critical observation is that while this problem is NP-hard in the general
397397+case, the **actual instance structure** of package dependency solving makes it
398398+easy:
399399+400400+1. **Domain sizes are small.** Each desired package has only `versions_per_pkg *
401401+ n_ocaml_versions` candidate universes — typically 1-30. The total number of
402402+ universes (potentially 24,000+) is irrelevant because we only look at
403403+ candidates for the desired packages.
404404+405405+2. **The effective branching factor is tiny.** Once OCaml version is fixed by
406406+ the first assignment, most other candidates are immediately eliminated. The
407407+ search tree has depth k but effective branching factor close to 1.
408408+409409+3. **Solutions are usually dense.** In a real ecosystem, compatible solutions
410410+ tend to exist (63.7% pairwise compatibility), so the search terminates at
411411+ the first leaf.
412412+413413+4. **The hard case (incompatibility) is also fast.** When no solution exists,
414414+ the constraints typically eliminate all possibilities within the first 2-3
415415+ assignments.
416416+417417+5. **Incompatibility is ecosystemic, not single-package.** Deeper analysis
418418+ reveals that 0% of incompatible pairs are caused by a single dependency
419419+ version disagreement alone. Rather, 84% of incompatible pairs come from
420420+ fundamentally different eras of the ecosystem (different OCaml major version
421421+ *and* different versions of dune, ppxlib, sexplib0, etc.). The remaining
422422+ 16% share an OCaml version but are split by the Jane Street PPX/dune
423423+ ecosystem versioning. This means the problem effectively partitions into
424424+ a few large compatibility clusters.
425425+426426+## Recommendations
427427+428428+### For Production Use
429429+430430+**Use Brute Force with incremental merging.** It is:
431431+- The fastest algorithm across all tested scales (0.002 ms per pairwise query
432432+ on 4,519 real universes; 23 seconds for exhaustive 10M-pair sweep)
433433+- The simplest to implement and maintain
434434+- Correct (100% agreement with all other solvers on synthetic data)
435435+- Memory-efficient (no preprocessing data structures)
436436+437437+The implementation should:
438438+1. For each desired package, collect its candidate universes (one per version
439439+ per OCaml-version-variant)
440440+2. Try combinations, maintaining a merged `StringMap` of
441441+ `package_name -> version`
442442+3. On each assignment, check the new universe's deps against the merged map;
443443+ if compatible, merge and recurse
444444+4. Return the first compatible set found, or `Incompatible`
445445+446446+### Optional Enhancement
447447+448448+If profiling shows that incompatible queries are common and slow (because
449449+they must exhaust the full search), add a **single optimization**:
450450+sort the desired packages by domain size ascending (MRV). This is essentially
451451+the Greedy+MRV algorithm — negligible overhead, and it causes incompatibility
452452+to be detected faster by processing the most constrained packages first.
453453+454454+### When Would Heavier Algorithms Pay Off?
455455+456456+The more sophisticated algorithms (AC-3, Signature clustering) would become
457457+worthwhile if:
458458+- Desired package count `k` grows beyond ~15-20
459459+- Each desired package has hundreds of candidate universes
460460+- The compatibility graph is dense with conflicts (many "almost compatible"
461461+ but not quite solutions)
462462+463463+None of these conditions are expected in the OCaml package ecosystem.
464464+465465+## Files
466466+467467+- `analysis/universe_compat.ml` — All 6 algorithms, correctness tests,
468468+ synthetic data generation, and benchmarks
469469+- `analysis/real_world.ml` — Real-world pairwise analysis using cached solutions
470470+ from the opam repository
471471+- `analysis/conflict_analysis.ml` — Deep conflict classification: for each
472472+ incompatible pair, checks all candidate pairs to determine whether the
473473+ conflict is OCaml-version-only, OCaml-plus-others, or non-OCaml
474474+- `analysis/dune` — Build configuration
475475+- `analysis/REPORT.md` — This report
+370
day10/analysis/conflict_analysis.ml
···11+(*
22+ Deeper Conflict Analysis
33+ ========================
44+55+ For each incompatible package pair, analyze WHY they're incompatible:
66+ - Is it purely OCaml version? (all candidate pairs disagree on ocaml)
77+ - Would they be compatible if we ignored OCaml version?
88+ - What's the "deepest" conflict — the one that remains even within
99+ the same OCaml version cluster?
1010+*)
module StringMap = Map.Make(String)
module StringSet = Set.Make(String)

(* One solved dependency tree: a target package together with the
   version selected for every package appearing in its solution. *)
type universe = {
  target_name : string;        (* target package name *)
  target_version : string;     (* target package version *)
  deps : string StringMap.t;   (* package name -> selected version *)
}

(* Reuse the JSON parser from real_world.ml *)
(* Minimal JSON AST; numbers are kept verbatim as JString (the analysis
   never needs numeric values). *)
type json =
  | JString of string
  | JBool of bool
  | JList of json list
  | JObj of (string * json) list
  | JNull
(* Index of the first non-whitespace character of [s] at or after [i];
   [String.length s] when only whitespace remains. *)
let rec skip_ws s i =
  let n = String.length s in
  if i < n && (match s.[i] with ' ' | '\t' | '\n' | '\r' -> true | _ -> false)
  then skip_ws s (i + 1)
  else i
(* Parse a JSON string literal whose opening quote is at [s.[i]].
   Returns (decoded contents, index just past the closing quote).
   Decodes the standard single-character escapes; \uXXXX is not decoded
   (its letters are copied through), which is fine for package names.
   Raises Failure on an unterminated string or escape. *)
let parse_string s i =
  let buf = Buffer.create 64 in
  let rec loop j =
    if j >= String.length s then failwith "unterminated string"
    else match s.[j] with
    | '"' -> (Buffer.contents buf, j + 1)
    | '\\' ->
      if j + 1 >= String.length s then failwith "unterminated escape";
      (match s.[j + 1] with
       | 'n' -> Buffer.add_char buf '\n'
       | 't' -> Buffer.add_char buf '\t'
       | 'r' -> Buffer.add_char buf '\r'
       | 'b' -> Buffer.add_char buf '\b'
       | 'f' -> Buffer.add_char buf '\012'
       (* '"', '\\', '/' and anything unrecognized: copy verbatim *)
       | c -> Buffer.add_char buf c);
      loop (j + 2)
    | c -> Buffer.add_char buf c; loop (j + 1)
  in
  loop (i + 1)
(* Parse any JSON value starting at index [i]; returns (value, next index).
   Numbers are not decoded — they are captured verbatim as JString, which
   is all the analysis needs.  Assumes well-formed input: literals are
   skipped by fixed length and the ':' after an object key is not
   verified. *)
let rec parse_value s i =
  let i = skip_ws s i in
  if i >= String.length s then (JNull, i)
  else match s.[i] with
  | '"' ->
    let (str, j) = parse_string s i in
    (JString str, j)
  | '{' -> parse_obj s (i + 1)
  | '[' -> parse_list s (i + 1)
  | 't' -> (JBool true, i + 4)   (* "true" *)
  | 'f' -> (JBool false, i + 5)  (* "false" *)
  | 'n' -> (JNull, i + 4)        (* "null" *)
  | _ ->
    (* Bare token (a number): scan to the next delimiter, keep raw text. *)
    let j = ref i in
    while !j < String.length s && s.[!j] <> ',' && s.[!j] <> '}' && s.[!j] <> ']'
       && s.[!j] <> ' ' && s.[!j] <> '\n' do
      incr j
    done;
    (JString (String.sub s i (!j - i)), !j)

(* Parse an object body; [i] points just past the '{'. *)
and parse_obj s i =
  let i = skip_ws s i in
  if i < String.length s && s.[i] = '}' then (JObj [], i + 1)
  else
    let pairs = ref [] in
    let j = ref i in
    let continue = ref true in
    while !continue do
      let ji = skip_ws s !j in
      let (key, ji) = parse_string s ji in
      let ji = skip_ws s ji in
      let ji = ji + 1 in  (* skip the ':' (unchecked) *)
      let (value, ji) = parse_value s ji in
      pairs := (key, value) :: !pairs;
      let ji = skip_ws s ji in
      if ji < String.length s && s.[ji] = ',' then j := ji + 1
      else begin j := ji + 1; continue := false end  (* past the '}' *)
    done;
    (JObj (List.rev !pairs), !j)

(* Parse an array body; [i] points just past the '['. *)
and parse_list s i =
  let i = skip_ws s i in
  if i < String.length s && s.[i] = ']' then (JList [], i + 1)
  else
    let items = ref [] in
    let j = ref i in
    let continue = ref true in
    while !continue do
      let (value, ji) = parse_value s !j in
      items := value :: !items;
      let ji = skip_ws s ji in
      if ji < String.length s && s.[ji] = ',' then j := ji + 1
      else begin j := ji + 1; continue := false end  (* past the ']' *)
    done;
    (JList (List.rev !items), !j)
(* Field lookup on a JSON object; JNull for missing keys or non-objects. *)
let json_member key = function
  | JObj pairs ->
    (match List.assoc_opt key pairs with Some v -> v | None -> JNull)
  | _ -> JNull
(* Split "name.version" at the first '.'; a dotless string yields an
   empty version.  Opam names contain no dots, versions may. *)
let split_package_string s =
  match String.index_opt s '.' with
  | None -> (s, "")
  | Some dot ->
    let name = String.sub s 0 dot in
    let ver = String.sub s (dot + 1) (String.length s - dot - 1) in
    (name, ver)
(* Load one solution JSON file.  Returns [Some universe] for a solved
   package, [None] for recorded failures ("failed":true) or unparseable
   files (a warning is printed).  The channel is closed even if the read
   raises — the original leaked [ic] on a read exception. *)
let load_solution_file path =
  let ic = open_in path in
  let s =
    Fun.protect
      ~finally:(fun () -> close_in_noerr ic)
      (fun () -> really_input_string ic (in_channel_length ic))
  in
  try
    let (json, _) = parse_value s 0 in
    match json_member "failed" json with
    | JBool true -> None
    | _ ->
      let pkg_str = match json_member "package" json with
        | JString s -> s | _ -> failwith "no package field" in
      let solution = match json_member "solution" json with
        | JObj pairs -> pairs | _ -> failwith "no solution field" in
      let (target_name, target_version) = split_package_string pkg_str in
      (* Solution keys are "name.version"; the per-package dependency
         lists (the values) are not needed here. *)
      let deps = List.fold_left (fun acc (pkg_str, _deps) ->
        let (name, version) = split_package_string pkg_str in
        StringMap.add name version acc
      ) StringMap.empty solution in
      Some { target_name; target_version; deps }
  with e ->
    Printf.eprintf "Warning: failed to parse %s: %s\n" path (Printexc.to_string e);
    None
(* Load every ".json" file in [dir], keeping only parseable, successful
   solutions.  [List.rev] reproduces the original accumulation order
   (reverse of readdir order). *)
let load_all_solutions dir =
  Sys.readdir dir
  |> Array.to_list
  |> List.filter (fun f -> Filename.check_suffix f ".json")
  |> List.filter_map (fun f -> load_solution_file (Filename.concat dir f))
  |> List.rev
(** Do two universes agree on the version of every shared dependency,
    disregarding packages named in [ignore_pkgs]?  Iterates over the
    smaller of the two maps. *)
let compatible_ignoring ?(ignore_pkgs=StringSet.empty) u1 u2 =
  let small, large =
    if StringMap.cardinal u1.deps <= StringMap.cardinal u2.deps
    then u1.deps, u2.deps
    else u2.deps, u1.deps
  in
  StringMap.for_all (fun name ver ->
    StringSet.mem name ignore_pkgs
    || (match StringMap.find_opt name large with
        | None -> true
        | Some other -> String.equal ver other)
  ) small
(** All packages on which the two universes pin different versions, as
    (name, version-in-smaller-map, version-in-larger-map) triples. *)
let find_all_conflicts u1 u2 =
  let small, large =
    if StringMap.cardinal u1.deps <= StringMap.cardinal u2.deps
    then u1.deps, u2.deps
    else u2.deps, u1.deps
  in
  StringMap.fold (fun name ver acc ->
    match StringMap.find_opt name large with
    | Some other when not (String.equal ver other) -> (name, ver, other) :: acc
    | _ -> acc
  ) small []
(** True iff some candidate in [univs_a] is compatible with some
    candidate in [univs_b]. *)
let any_pair_compatible univs_a univs_b =
  List.exists
    (fun ua -> List.exists (compatible_ignoring ua) univs_b)
    univs_a
(** Like [any_pair_compatible], but version disagreements on packages in
    [ignore_pkgs] are disregarded. *)
let any_pair_compatible_ignoring ignore_pkgs univs_a univs_b =
  List.exists
    (fun ua ->
       List.exists (fun ub -> compatible_ignoring ~ignore_pkgs ua ub) univs_b)
    univs_a
(* Entry point: load all cached solutions, then sweep every unordered
   package pair and classify each incompatible pair by what causes the
   conflict — OCaml version alone, OCaml plus other deps, or non-OCaml
   deps only.  Finally print tallies of the most conflict-prone deps. *)
let () =
  let solutions_dir = if Array.length Sys.argv > 1 then Sys.argv.(1)
    else "/cache/jons-agent/solutions/54aaf73d7a" in

  Printf.printf "Loading solutions from %s...\n%!" solutions_dir;
  let all_universes = load_all_solutions solutions_dir in
  Printf.printf "Loaded %d universes\n%!" (List.length all_universes);

  (* Group the candidate universes by target package name. *)
  let by_package = List.fold_left (fun acc u ->
    let existing = match StringMap.find_opt u.target_name acc with
      | Some l -> l | None -> [] in
    StringMap.add u.target_name (u :: existing) acc
  ) StringMap.empty all_universes in

  let pkg_names = StringMap.bindings by_package |> List.map fst in
  let n_packages = List.length pkg_names in
  let pkg_arr = Array.of_list pkg_names in
  Printf.printf "Found %d distinct packages\n\n%!" n_packages;

  (* ================================================================= *)
  (* Detailed conflict classification *)
  (* ================================================================= *)

  Printf.printf "=== Conflict Classification ===\n";
  Printf.printf "For each incompatible pair, classify the conflict...\n%!";

  let ocaml_only = ref 0 in (* incompatible, but compatible if we ignore ocaml *)
  let ocaml_plus_others = ref 0 in (* ocaml disagrees AND other things disagree too *)
  let no_ocaml_conflict = ref 0 in (* incompatible, but ocaml version matches *)
  let total_incompat = ref 0 in
  let total_compat = ref 0 in

  (* For the "no_ocaml_conflict" cases, track what causes conflict *)
  let non_ocaml_conflicts = Hashtbl.create 64 in

  (* For each incompatible pair, check across ALL candidate pairs *)
  (* what the conflict pattern looks like *)
  let ignore_ocaml = StringSet.singleton "ocaml" in

  (* Also track: per-pair, what are ALL the conflicting dep packages? *)
  let conflict_tallies = Hashtbl.create 64 in (* dep_name -> count of pairs where it conflicts *)

  let t0 = Unix.gettimeofday () in

  (* Upper-triangle sweep: each unordered pair (i, j) visited once. *)
  for i = 0 to Array.length pkg_arr - 1 do
    if i mod 200 = 0 && i > 0 then
      Printf.printf "  Progress: %d/%d...\n%!" i n_packages;
    let pkg_a = pkg_arr.(i) in
    let univs_a = match StringMap.find_opt pkg_a by_package with
      | Some l -> l | None -> [] in
    for j = i + 1 to Array.length pkg_arr - 1 do
      let pkg_b = pkg_arr.(j) in
      let univs_b = match StringMap.find_opt pkg_b by_package with
        | Some l -> l | None -> [] in
      if any_pair_compatible univs_a univs_b then
        incr total_compat
      else begin
        incr total_incompat;
        (* Classify: would ignoring ocaml version make them compatible? *)
        let compat_without_ocaml =
          any_pair_compatible_ignoring ignore_ocaml univs_a univs_b in
        (* Check if ocaml version actually disagrees in all pairs *)
        let has_same_ocaml_pair = List.exists (fun ua ->
          List.exists (fun ub ->
            let ov_a = StringMap.find_opt "ocaml" ua.deps in
            let ov_b = StringMap.find_opt "ocaml" ub.deps in
            match ov_a, ov_b with
            | Some a, Some b -> String.equal a b
            | None, None -> true
            | _ -> true (* if one doesn't have ocaml, ocaml isn't the conflict *)
          ) univs_b
        ) univs_a in
        if compat_without_ocaml then
          incr ocaml_only
        else if not has_same_ocaml_pair then
          incr ocaml_plus_others
        else begin
          incr no_ocaml_conflict;
          (* Find what DOES conflict — look at first pair with matching ocaml *)
          let found = ref false in
          List.iter (fun ua ->
            if not !found then
              List.iter (fun ub ->
                if not !found then begin
                  let ov_a = StringMap.find_opt "ocaml" ua.deps in
                  let ov_b = StringMap.find_opt "ocaml" ub.deps in
                  let same_ocaml = match ov_a, ov_b with
                    | Some a, Some b -> String.equal a b
                    | _ -> true in
                  if same_ocaml then begin
                    found := true;
                    let conflicts = find_all_conflicts ua ub in
                    List.iter (fun (name, _, _) ->
                      let cur = match Hashtbl.find_opt non_ocaml_conflicts name with
                        | Some n -> n | None -> 0 in
                      Hashtbl.replace non_ocaml_conflicts name (cur + 1)
                    ) conflicts
                  end
                end
              ) univs_b
          ) univs_a
        end;

        (* For every incompatible pair, tally ALL conflicting deps across
           the "best" candidate pair (the one with fewest conflicts) *)
        let best_conflicts = ref [] in
        let best_count = ref max_int in
        List.iter (fun ua ->
          List.iter (fun ub ->
            let conflicts = find_all_conflicts ua ub in
            let n = List.length conflicts in
            if n > 0 && n < !best_count then begin
              best_count := n;
              best_conflicts := conflicts
            end
          ) univs_b
        ) univs_a;
        List.iter (fun (name, _, _) ->
          let cur = match Hashtbl.find_opt conflict_tallies name with
            | Some n -> n | None -> 0 in
          Hashtbl.replace conflict_tallies name (cur + 1)
        ) !best_conflicts
      end
    done
  done;

  let t1 = Unix.gettimeofday () in
  Printf.printf "\nAnalysis completed in %.1f seconds\n\n" (t1 -. t0);

  (* NOTE(review): if every pair is compatible, the percentages below
     divide by zero (yielding nan/inf, not a crash) — confirm acceptable. *)
  let total = !total_compat + !total_incompat in
  Printf.printf "=== Results ===\n\n";
  Printf.printf "Total pairs: %d\n" total;
  Printf.printf "Compatible: %d (%.1f%%)\n" !total_compat
    (100.0 *. float_of_int !total_compat /. float_of_int total);
  Printf.printf "Incompatible: %d (%.1f%%)\n\n" !total_incompat
    (100.0 *. float_of_int !total_incompat /. float_of_int total);

  Printf.printf "--- Incompatible Pair Classification ---\n\n";
  Printf.printf "  OCaml-only conflicts: %7d (%5.1f%% of incompatible)\n"
    !ocaml_only
    (100.0 *. float_of_int !ocaml_only /. float_of_int !total_incompat);
  Printf.printf "    (would be compatible if OCaml version were ignored)\n\n";
  Printf.printf "  OCaml + other conflicts: %7d (%5.1f%% of incompatible)\n"
    !ocaml_plus_others
    (100.0 *. float_of_int !ocaml_plus_others /. float_of_int !total_incompat);
  Printf.printf "    (no candidate pair shares an OCaml version, AND\n";
  Printf.printf "     ignoring OCaml still doesn't make them compatible)\n\n";
  Printf.printf "  Non-OCaml conflicts: %7d (%5.1f%% of incompatible)\n"
    !no_ocaml_conflict
    (100.0 *. float_of_int !no_ocaml_conflict /. float_of_int !total_incompat);
  Printf.printf "    (at least one candidate pair shares OCaml version,\n";
  Printf.printf "     but other dependency version conflicts prevent compatibility)\n\n";

  Printf.printf "--- Top non-OCaml conflict causes ---\n";
  Printf.printf "(In pairs where OCaml version matches but still incompatible)\n\n";
  let non_ocaml_sorted = Hashtbl.fold (fun k v acc -> (k, v) :: acc) non_ocaml_conflicts []
    |> List.sort (fun (_, a) (_, b) -> compare b a) in
  List.iteri (fun i (name, count) ->
    if i < 30 then
      Printf.printf "  %-40s %d pairs\n" name count
  ) non_ocaml_sorted;

  Printf.printf "\n--- Top conflict-causing dependencies (best-pair analysis) ---\n";
  Printf.printf "(For each incompatible pair, find the candidate pair with fewest\n";
  Printf.printf " conflicts, then tally which deps appear in those minimal conflicts)\n\n";
  let tally_sorted = Hashtbl.fold (fun k v acc -> (k, v) :: acc) conflict_tallies []
    |> List.sort (fun (_, a) (_, b) -> compare b a) in
  List.iteri (fun i (name, count) ->
    if i < 30 then
      Printf.printf "  %-40s %d pairs (%.1f%% of incompatible)\n"
        name count
        (100.0 *. float_of_int count /. float_of_int !total_incompat)
  ) tally_sorted;

  Printf.printf "\nDone.\n"
···11+(*
22+ Real-World Pairwise Universe Compatibility Analysis
33+ ====================================================
44+55+ Loads all solution JSON files from the cache, extracts universe data,
66+ and runs pairwise compatibility tests across all package pairs.
77+88+ A "universe" here is the set of (package_name, version) pairs in a
99+ solved dependency tree for one target package.
1010+1111+ Two universes are compatible if they agree on every shared package version.
1212+*)
module StringMap = Map.Make(String)
module StringSet = Set.Make(String)

(* ========================================================================= *)
(* Data Model *)
(* ========================================================================= *)

(* One solved dependency tree for a single target package. *)
type universe = {
  target_name : string;       (* e.g. "alcotest" *)
  target_version : string;    (* e.g. "1.9.1" *)
  deps : string StringMap.t;  (* package_name -> version *)
}
2626+2727+(* ========================================================================= *)
2828+(* JSON Parsing (minimal, no dependencies) *)
2929+(* ========================================================================= *)
(* We need to parse the solution JSON. The format is:
     {"package":"name.ver","solution":{"pkg.ver":["dep.ver",...],...}}
   or {"failed":true,"error":"..."} for failures.

   We only need the keys of the "solution" object (they are "name.version").
   In opam, the package name is everything before the first dot in the
   string produced by OpamPackage.to_string: names use hyphens rather than
   dots, so the first '.' reliably separates the name from the version
   (which may itself contain dots).
*)
(* Split an opam "name.version" string at the first '.'.  Package names
   contain no dots, so everything after the first dot is the version; a
   dotless string yields an empty version. *)
let split_package_string s =
  match String.index_opt s '.' with
  | None -> (s, "")
  | Some i ->
    (String.sub s 0 i, String.sub s (i + 1) (String.length s - i - 1))
5757+5858+(* Simple JSON string parser - extract string value between quotes *)
5959+(* This is a quick-and-dirty parser for our specific JSON format *)
(* Minimal JSON AST; numbers are kept verbatim as JString (this analysis
   never needs numeric values). *)
type json =
  | JString of string
  | JBool of bool
  | JList of json list
  | JObj of (string * json) list
  | JNull
(* Index of the first non-whitespace character at or after [i];
   [String.length s] when nothing but whitespace remains. *)
let rec skip_ws s i =
  let is_ws =
    i < String.length s
    && (s.[i] = ' ' || s.[i] = '\t' || s.[i] = '\n' || s.[i] = '\r')
  in
  if is_ws then skip_ws s (i + 1) else i
(* Parse a JSON string literal; [i] points to the opening quote.
   Returns (decoded contents, index just past the closing quote).
   Decodes the standard single-character escapes; \uXXXX is not decoded
   (its letters are copied through), which is fine for package names.
   Raises Failure on an unterminated string or escape. *)
let parse_string s i =
  let buf = Buffer.create 64 in
  let rec loop j =
    if j >= String.length s then failwith "unterminated string"
    else match s.[j] with
    | '"' -> (Buffer.contents buf, j + 1)
    | '\\' ->
      if j + 1 >= String.length s then failwith "unterminated escape";
      (match s.[j + 1] with
       | 'n' -> Buffer.add_char buf '\n'
       | 't' -> Buffer.add_char buf '\t'
       | 'r' -> Buffer.add_char buf '\r'
       | 'b' -> Buffer.add_char buf '\b'
       | 'f' -> Buffer.add_char buf '\012'
       (* '"', '\\', '/' and anything unrecognized: copy verbatim *)
       | c -> Buffer.add_char buf c);
      loop (j + 2)
    | c -> Buffer.add_char buf c; loop (j + 1)
  in
  loop (i + 1)
(* Parse any JSON value starting at index [i]; returns (value, next index).
   Numbers are not decoded — they are kept verbatim as JString, which is
   all this analysis needs.  Assumes well-formed input: literal lengths
   are fixed and the ':' after an object key is not verified. *)
let rec parse_value s i =
  let i = skip_ws s i in
  if i >= String.length s then (JNull, i)
  else match s.[i] with
  | '"' ->
    let (str, j) = parse_string s i in
    (JString str, j)
  | '{' -> parse_obj s (i + 1)
  | '[' -> parse_list s (i + 1)
  | 't' -> (JBool true, i + 4)   (* "true" *)
  | 'f' -> (JBool false, i + 5)  (* "false" *)
  | 'n' -> (JNull, i + 4)        (* "null" *)
  | _ ->
    (* number - skip until delimiter *)
    let j = ref i in
    while !j < String.length s && s.[!j] <> ',' && s.[!j] <> '}' && s.[!j] <> ']'
       && s.[!j] <> ' ' && s.[!j] <> '\n' do
      incr j
    done;
    (JString (String.sub s i (!j - i)), !j)

(* Parse an object body; [i] points just past the '{'. *)
and parse_obj s i =
  let i = skip_ws s i in
  if i < String.length s && s.[i] = '}' then (JObj [], i + 1)
  else
    let pairs = ref [] in
    let j = ref i in
    let continue = ref true in
    while !continue do
      let ji = skip_ws s !j in
      let (key, ji) = parse_string s ji in
      let ji = skip_ws s ji in
      (* expect colon *)
      let ji = ji + 1 in
      let (value, ji) = parse_value s ji in
      pairs := (key, value) :: !pairs;
      let ji = skip_ws s ji in
      if ji < String.length s && s.[ji] = ',' then
        j := ji + 1
      else begin
        j := ji + 1; (* skip closing brace *)
        continue := false
      end
    done;
    (JObj (List.rev !pairs), !j)

(* Parse an array body; [i] points just past the '['. *)
and parse_list s i =
  let i = skip_ws s i in
  if i < String.length s && s.[i] = ']' then (JList [], i + 1)
  else
    let items = ref [] in
    let j = ref i in
    let continue = ref true in
    while !continue do
      let (value, ji) = parse_value s !j in
      items := value :: !items;
      let ji = skip_ws s ji in
      if ji < String.length s && s.[ji] = ',' then
        j := ji + 1
      else begin
        j := ji + 1; (* skip closing bracket *)
        continue := false
      end
    done;
    (JList (List.rev !items), !j)
(* Return the value bound to [key] in a JSON object, or JNull when the
   key is absent or the value is not an object. *)
let json_member key = function
  | JObj pairs -> Option.value ~default:JNull (List.assoc_opt key pairs)
  | _ -> JNull
158158+159159+(* ========================================================================= *)
160160+(* Loading Solutions *)
161161+(* ========================================================================= *)
(* Load one solution JSON file.  Returns [Some universe] for a solved
   package, [None] for recorded failures ("failed":true) or unparseable
   files (a warning is printed).  The channel is closed even if the read
   raises — the original leaked [ic] on a read exception. *)
let load_solution_file path =
  let ic = open_in path in
  let s =
    Fun.protect
      ~finally:(fun () -> close_in_noerr ic)
      (fun () -> really_input_string ic (in_channel_length ic))
  in
  try
    let (json, _) = parse_value s 0 in
    (* Check for failure *)
    match json_member "failed" json with
    | JBool true -> None
    | _ ->
      let pkg_str = match json_member "package" json with
        | JString s -> s | _ -> failwith "no package field" in
      let solution = match json_member "solution" json with
        | JObj pairs -> pairs | _ -> failwith "no solution field" in
      let (target_name, target_version) = split_package_string pkg_str in
      (* Build deps map from solution keys *)
      let deps = List.fold_left (fun acc (pkg_str, _deps) ->
        let (name, version) = split_package_string pkg_str in
        StringMap.add name version acc
      ) StringMap.empty solution in
      Some { target_name; target_version; deps }
  with e ->
    Printf.eprintf "Warning: failed to parse %s: %s\n" path (Printexc.to_string e);
    None
(* Load every ".json" solution file in [dir]; failed solves and parse
   errors are skipped.  Accumulates by prepending in readdir order,
   exactly as the original did. *)
let load_all_solutions dir =
  Array.fold_left
    (fun acc filename ->
       if Filename.check_suffix filename ".json" then
         match load_solution_file (Filename.concat dir filename) with
         | Some u -> u :: acc
         | None -> acc
       else acc)
    [] (Sys.readdir dir)
203203+204204+(* ========================================================================= *)
205205+(* Compatibility *)
206206+(* ========================================================================= *)
(* Two universes are compatible iff they agree on the version of every
   package present in both dependency maps.  Iterates over the smaller
   map.  (Currently unused; kept as the reference definition.) *)
let _universes_compatible u1 u2 =
  let small, large =
    if StringMap.cardinal u1.deps <= StringMap.cardinal u2.deps
    then u1.deps, u2.deps
    else u2.deps, u1.deps
  in
  StringMap.for_all (fun name ver ->
    match StringMap.find_opt name large with
    | None -> true
    | Some other -> String.equal ver other
  ) small
(** Does universe [u] agree with the accumulated [merged] map on the
    version of every package they share? *)
let compatible_with_merged merged u =
  let agrees name ver =
    match StringMap.find_opt name merged with
    | None -> true
    | Some pinned -> String.equal ver pinned
  in
  StringMap.for_all agrees u.deps
(* Merge [u]'s dependencies into [merged]; on a clash the binding already
   in [merged] wins (it was pinned by an earlier choice, and
   compatible_with_merged has verified they are equal anyway). *)
let merge_deps merged u =
  StringMap.union (fun _name v1 _v2 -> Some v1) merged u.deps
230230+231231+(* ========================================================================= *)
232232+(* Brute Force Solver (the winner from our analysis) *)
233233+(* ========================================================================= *)
(* Outcome of a compatibility search. *)
type result =
  | Compatible of universe list  (* one chosen universe per desired package *)
  | Incompatible
(* Backtracking search: pick one candidate universe per desired package
   such that all selections agree on every shared dependency version.
   Returns the first compatible selection found, or [Incompatible].
   Carries a merged package->version map down each branch so a candidate
   is checked against all earlier choices in one pass. *)
let solve_brute_force ~(desired : string list)
    ~(candidates : universe list StringMap.t) : result =
  let desired_arr = Array.of_list desired in
  let k = Array.length desired_arr in
  (* Candidate universes per desired package, in input order. *)
  let cand_arrs = Array.map (fun pkg ->
    match StringMap.find_opt pkg candidates with
    | Some l -> Array.of_list l
    | None -> [||]
  ) desired_arr in
  if Array.exists (fun a -> Array.length a = 0) cand_arrs then
    (* A desired package with no candidates can never be satisfied. *)
    Incompatible
  else begin
    let indices = Array.make k 0 in  (* current choice at each depth *)
    let found = ref None in
    let rec search depth merged =
      if !found <> None then ()      (* already succeeded: unwind *)
      else if depth = k then
        (* All packages assigned: materialize the chosen selection. *)
        found := Some (Array.to_list (Array.init k (fun i -> cand_arrs.(i).(indices.(i)))))
      else begin
        let n = Array.length cand_arrs.(depth) in
        for j = 0 to n - 1 do
          if !found = None then begin
            indices.(depth) <- j;
            let u = cand_arrs.(depth).(j) in
            (* Recurse only if this candidate agrees with everything
               chosen so far. *)
            if compatible_with_merged merged u then
              search (depth + 1) (merge_deps merged u)
          end
        done
      end
    in
    search 0 StringMap.empty;
    match !found with
    | Some l -> Compatible l
    | None -> Incompatible
  end
274274+275275+(* ========================================================================= *)
276276+(* Statistics Collection *)
277277+(* ========================================================================= *)
(* Aggregate counters for the pairwise compatibility sweep. *)
type pair_stats = {
  mutable compatible_pairs : int;
  mutable incompatible_pairs : int;
  mutable total_pairs : int;
  mutable total_time : float;  (* cumulative solve time, seconds *)
  (* Track which packages are most/least compatible *)
  mutable pkg_compat_count : int StringMap.t;
  mutable pkg_incompat_count : int StringMap.t;
  (* Track conflict reasons *)
  mutable conflict_packages : int StringMap.t; (* which package caused the conflict *)
}

(* Fresh all-zero statistics record. *)
let new_stats () = {
  compatible_pairs = 0;
  incompatible_pairs = 0;
  total_pairs = 0;
  total_time = 0.0;
  pkg_compat_count = StringMap.empty;
  pkg_incompat_count = StringMap.empty;
  conflict_packages = StringMap.empty;
}
(* Bump the counter stored under [key] in [map]; absence counts as 0. *)
let incr_map map key =
  let next =
    match StringMap.find_opt key map with
    | Some n -> n + 1
    | None -> 1
  in
  StringMap.add key next map
(** First package (in ascending name order of the smaller map) on which
    the two universes pin different versions, or [None] if they agree on
    everything shared.  Uses a lazy sequence so the scan stops at the
    first conflict — the original fold kept walking the whole map after
    the answer was already found. *)
let find_conflict_package u1 u2 =
  let small, large =
    if StringMap.cardinal u1.deps <= StringMap.cardinal u2.deps
    then u1.deps, u2.deps
    else u2.deps, u1.deps
  in
  (* StringMap.to_seq yields bindings in ascending key order, matching
     the fold order of the original implementation. *)
  StringMap.to_seq small
  |> Seq.find_map (fun (name, ver) ->
       match StringMap.find_opt name large with
       | Some ver' when not (String.equal ver ver') -> Some name
       | _ -> None)
320320+321321+(* ========================================================================= *)
322322+(* Main Analysis *)
323323+(* ========================================================================= *)
324324+325325+let () =
326326+ let solutions_dir = if Array.length Sys.argv > 1 then Sys.argv.(1)
327327+ else "/cache/jons-agent/solutions/54aaf73d7a" in
328328+329329+ Printf.printf "Loading solutions from %s...\n%!" solutions_dir;
330330+ let all_universes = load_all_solutions solutions_dir in
331331+ Printf.printf "Loaded %d universes\n%!" (List.length all_universes);
332332+333333+ (* Group by target package name *)
334334+ let by_package = List.fold_left (fun acc u ->
335335+ let existing = match StringMap.find_opt u.target_name acc with
336336+ | Some l -> l | None -> [] in
337337+ StringMap.add u.target_name (u :: existing) acc
338338+ ) StringMap.empty all_universes in
339339+340340+ let pkg_names = StringMap.bindings by_package |> List.map fst in
341341+ let n_packages = List.length pkg_names in
342342+ Printf.printf "Found %d distinct packages\n%!" n_packages;
343343+344344+ (* Compute some universe size statistics *)
345345+ let dep_sizes = List.map (fun u -> StringMap.cardinal u.deps) all_universes in
346346+ let dep_sizes_sorted = List.sort compare dep_sizes in
347347+ let total_deps = List.fold_left (+) 0 dep_sizes in
348348+ let avg_deps = float_of_int total_deps /. float_of_int (List.length all_universes) in
349349+ let median_deps = List.nth dep_sizes_sorted (List.length dep_sizes_sorted / 2) in
350350+ let max_deps = List.nth dep_sizes_sorted (List.length dep_sizes_sorted - 1) in
351351+ let min_deps = List.hd dep_sizes_sorted in
352352+353353+ Printf.printf "\n=== Universe Size Statistics ===\n";
354354+ Printf.printf " Total universes: %d\n" (List.length all_universes);
355355+ Printf.printf " Distinct packages: %d\n" n_packages;
356356+ Printf.printf " Avg versions per package: %.1f\n"
357357+ (float_of_int (List.length all_universes) /. float_of_int n_packages);
358358+ Printf.printf " Dependencies per universe: min=%d, median=%d, avg=%.1f, max=%d\n"
359359+ min_deps median_deps avg_deps max_deps;
360360+361361+ (* Version distribution *)
362362+ let version_counts = StringMap.map List.length by_package in
363363+ let vc_sorted = StringMap.bindings version_counts
364364+ |> List.map snd |> List.sort compare in
365365+ let vc_median = List.nth vc_sorted (List.length vc_sorted / 2) in
366366+ let vc_max = List.nth vc_sorted (List.length vc_sorted - 1) in
367367+ Printf.printf " Versions per package: min=1, median=%d, max=%d\n" vc_median vc_max;
368368+369369+ (* Show top 10 packages by version count *)
370370+ Printf.printf "\n Top 10 packages by version count:\n";
371371+ let top_by_versions = StringMap.bindings version_counts
372372+ |> List.sort (fun (_, a) (_, b) -> compare b a)
373373+ |> List.filteri (fun i _ -> i < 10) in
374374+ List.iter (fun (name, count) ->
375375+ Printf.printf " %-40s %d versions\n" name count
376376+ ) top_by_versions;
377377+378378+ (* OCaml version distribution *)
379379+ let ocaml_versions = Hashtbl.create 16 in
380380+ List.iter (fun u ->
381381+ match StringMap.find_opt "ocaml" u.deps with
382382+ | Some v ->
383383+ let cur = match Hashtbl.find_opt ocaml_versions v with
384384+ | Some n -> n | None -> 0 in
385385+ Hashtbl.replace ocaml_versions v (cur + 1)
386386+ | None -> ()
387387+ ) all_universes;
388388+ Printf.printf "\n OCaml version distribution across universes:\n";
389389+ let ov_list = Hashtbl.fold (fun k v acc -> (k, v) :: acc) ocaml_versions [] in
390390+ let ov_sorted = List.sort (fun (a, _) (b, _) -> compare a b) ov_list in
391391+ List.iter (fun (ver, count) ->
392392+ Printf.printf " OCaml %-20s %d universes (%.1f%%)\n"
393393+ ver count (100.0 *. float_of_int count /. float_of_int (List.length all_universes))
394394+ ) ov_sorted;
395395+396396+ (* ================================================================= *)
397397+ (* Pairwise compatibility analysis *)
398398+ (* ================================================================= *)
399399+400400+ let total_pairs = n_packages * (n_packages - 1) / 2 in
401401+ Printf.printf "\n=== Pairwise Compatibility Analysis ===\n";
402402+ Printf.printf " Testing %d package pairs (%d packages)...\n%!" total_pairs n_packages;
403403+404404+ let pkg_arr = Array.of_list pkg_names in
405405+ let stats = new_stats () in
406406+ let t0 = Unix.gettimeofday () in
407407+408408+ (* For each pair, try to find a compatible set *)
409409+ for i = 0 to Array.length pkg_arr - 1 do
410410+ if i mod 100 = 0 && i > 0 then
411411+ Printf.printf " Progress: %d/%d packages processed (%d pairs so far)...\n%!"
412412+ i n_packages stats.total_pairs;
413413+ for j = i + 1 to Array.length pkg_arr - 1 do
414414+ let pkg_a = pkg_arr.(i) in
415415+ let pkg_b = pkg_arr.(j) in
416416+ let desired = [pkg_a; pkg_b] in
417417+ let result = solve_brute_force ~desired ~candidates:by_package in
418418+ stats.total_pairs <- stats.total_pairs + 1;
419419+ match result with
420420+ | Compatible _ ->
421421+ stats.compatible_pairs <- stats.compatible_pairs + 1;
422422+ stats.pkg_compat_count <- incr_map stats.pkg_compat_count pkg_a;
423423+ stats.pkg_compat_count <- incr_map stats.pkg_compat_count pkg_b
424424+ | Incompatible ->
425425+ stats.incompatible_pairs <- stats.incompatible_pairs + 1;
426426+ stats.pkg_incompat_count <- incr_map stats.pkg_incompat_count pkg_a;
427427+ stats.pkg_incompat_count <- incr_map stats.pkg_incompat_count pkg_b;
428428+ (* Find what caused the conflict - check best candidates *)
429429+ let univs_a = match StringMap.find_opt pkg_a by_package with
430430+ | Some l -> l | None -> [] in
431431+ let univs_b = match StringMap.find_opt pkg_b by_package with
432432+ | Some l -> l | None -> [] in
433433+ (* Check first pair to find a representative conflict *)
434434+ (match univs_a, univs_b with
435435+ | ua :: _, ub :: _ ->
436436+ (match find_conflict_package ua ub with
437437+ | Some pkg -> stats.conflict_packages <- incr_map stats.conflict_packages pkg
438438+ | None -> ())
439439+ | _ -> ())
440440+ done
441441+ done;
442442+443443+ let t1 = Unix.gettimeofday () in
444444+ stats.total_time <- t1 -. t0;
445445+446446+ Printf.printf "\n=== Pairwise Results ===\n";
447447+ Printf.printf " Total pairs tested: %d\n" stats.total_pairs;
448448+ Printf.printf " Compatible: %d (%.1f%%)\n"
449449+ stats.compatible_pairs
450450+ (100.0 *. float_of_int stats.compatible_pairs /. float_of_int stats.total_pairs);
451451+ Printf.printf " Incompatible: %d (%.1f%%)\n"
452452+ stats.incompatible_pairs
453453+ (100.0 *. float_of_int stats.incompatible_pairs /. float_of_int stats.total_pairs);
454454+ Printf.printf " Total time: %.3f s\n" stats.total_time;
455455+ Printf.printf " Avg per pair: %.4f ms\n"
456456+ (stats.total_time /. float_of_int stats.total_pairs *. 1000.0);
457457+458458+ (* Most compatible packages *)
459459+ Printf.printf "\n Top 20 most compatible packages (compatible with most others):\n";
460460+ let compat_sorted = StringMap.bindings stats.pkg_compat_count
461461+ |> List.sort (fun (_, a) (_, b) -> compare b a) in
462462+ List.iteri (fun i (name, count) ->
463463+ if i < 20 then
464464+ Printf.printf " %-40s compatible with %d/%d packages (%.1f%%)\n"
465465+ name count (n_packages - 1)
466466+ (100.0 *. float_of_int count /. float_of_int (n_packages - 1))
467467+ ) compat_sorted;
468468+469469+ (* Most incompatible packages *)
470470+ Printf.printf "\n Top 20 most incompatible packages:\n";
471471+ let incompat_sorted = StringMap.bindings stats.pkg_incompat_count
472472+ |> List.sort (fun (_, a) (_, b) -> compare b a) in
473473+ List.iteri (fun i (name, count) ->
474474+ if i < 20 then
475475+ Printf.printf " %-40s incompatible with %d/%d packages (%.1f%%)\n"
476476+ name count (n_packages - 1)
477477+ (100.0 *. float_of_int count /. float_of_int (n_packages - 1))
478478+ ) incompat_sorted;
479479+480480+ (* Packages that are 100% compatible with everything *)
481481+ let fully_compatible = List.filter (fun (_, count) ->
482482+ count = n_packages - 1
483483+ ) compat_sorted in
484484+ Printf.printf "\n Packages compatible with ALL others: %d\n" (List.length fully_compatible);
485485+ if List.length fully_compatible > 0 && List.length fully_compatible <= 20 then
486486+ List.iter (fun (name, _) -> Printf.printf " %s\n" name) fully_compatible;
487487+488488+ (* Packages that are compatible with nothing *)
489489+ let no_compat_pkgs = List.filter (fun name ->
490490+ not (StringMap.mem name stats.pkg_compat_count)
491491+ ) pkg_names in
492492+ Printf.printf "\n Packages compatible with NOTHING: %d\n" (List.length no_compat_pkgs);
493493+ if List.length no_compat_pkgs <= 20 then
494494+ List.iter (fun name -> Printf.printf " %s\n" name) no_compat_pkgs;
495495+496496+ (* Top conflict-causing packages *)
497497+ Printf.printf "\n Top 20 packages most frequently causing conflicts:\n";
498498+ let conflict_sorted = StringMap.bindings stats.conflict_packages
499499+ |> List.sort (fun (_, a) (_, b) -> compare b a) in
500500+ List.iteri (fun i (name, count) ->
501501+ if i < 20 then
502502+ Printf.printf " %-40s caused %d conflicts\n" name count
503503+ ) conflict_sorted;
504504+505505+ (* Compatibility rate distribution *)
506506+ Printf.printf "\n Compatibility rate distribution:\n";
507507+ let rates = List.map (fun name ->
508508+ let compat = match StringMap.find_opt name stats.pkg_compat_count with
509509+ | Some n -> n | None -> 0 in
510510+ (name, float_of_int compat /. float_of_int (n_packages - 1))
511511+ ) pkg_names in
512512+ let rate_buckets = Array.make 11 0 in (* 0-10%, 10-20%, ..., 90-100%, 100% *)
513513+ List.iter (fun (_, rate) ->
514514+ let bucket = min 10 (int_of_float (rate *. 10.0)) in
515515+ rate_buckets.(bucket) <- rate_buckets.(bucket) + 1
516516+ ) rates;
517517+ for i = 0 to 10 do
518518+ let lo = i * 10 in
519519+ let hi = if i = 10 then 100 else (i + 1) * 10 in
520520+ Printf.printf " %3d-%3d%%: %d packages\n" lo hi rate_buckets.(i)
521521+ done;
522522+523523+ (* ================================================================= *)
524524+ (* Triple compatibility (sample) *)
525525+ (* ================================================================= *)
526526+527527+ Printf.printf "\n=== Triple Compatibility (sampled) ===\n";
528528+ let rng = Random.State.make [| 42 |] in
529529+ let n_triple_trials = min 100000 (n_packages * n_packages) in
530530+ Printf.printf " Testing %d random triples...\n%!" n_triple_trials;
531531+ let triple_compat = ref 0 in
532532+ let triple_incompat = ref 0 in
533533+ let t2 = Unix.gettimeofday () in
534534+ for _ = 0 to n_triple_trials - 1 do
535535+ let i = Random.State.int rng n_packages in
536536+ let j = ref (Random.State.int rng n_packages) in
537537+ while !j = i do j := Random.State.int rng n_packages done;
538538+ let k = ref (Random.State.int rng n_packages) in
539539+ while !k = i || !k = !j do k := Random.State.int rng n_packages done;
540540+ let desired = [pkg_arr.(i); pkg_arr.(!j); pkg_arr.(!k)] in
541541+ match solve_brute_force ~desired ~candidates:by_package with
542542+ | Compatible _ -> incr triple_compat
543543+ | Incompatible -> incr triple_incompat
544544+ done;
545545+ let t3 = Unix.gettimeofday () in
546546+ Printf.printf " Compatible: %d (%.1f%%)\n"
547547+ !triple_compat
548548+ (100.0 *. float_of_int !triple_compat /. float_of_int n_triple_trials);
549549+ Printf.printf " Incompatible: %d (%.1f%%)\n"
550550+ !triple_incompat
551551+ (100.0 *. float_of_int !triple_incompat /. float_of_int n_triple_trials);
552552+ Printf.printf " Time: %.3f s (%.4f ms per triple)\n"
553553+ (t3 -. t2)
554554+ ((t3 -. t2) /. float_of_int n_triple_trials *. 1000.0);
555555+556556+ (* ================================================================= *)
557557+ (* Larger group compatibility (sampled) *)
558558+ (* ================================================================= *)
559559+560560+ Printf.printf "\n=== N-way Compatibility (sampled) ===\n";
561561+ let test_n_way n trials =
562562+ let compat = ref 0 in
563563+ let t_start = Unix.gettimeofday () in
564564+ for _ = 0 to trials - 1 do
565565+ let chosen = Hashtbl.create n in
566566+ while Hashtbl.length chosen < n do
567567+ let idx = Random.State.int rng n_packages in
568568+ let name = pkg_arr.(idx) in
569569+ if not (Hashtbl.mem chosen name) then
570570+ Hashtbl.add chosen name true
571571+ done;
572572+ let desired = Hashtbl.fold (fun k _ acc -> k :: acc) chosen [] in
573573+ match solve_brute_force ~desired ~candidates:by_package with
574574+ | Compatible _ -> incr compat
575575+ | Incompatible -> ()
576576+ done;
577577+ let t_end = Unix.gettimeofday () in
578578+ let rate = 100.0 *. float_of_int !compat /. float_of_int trials in
579579+ let avg_ms = (t_end -. t_start) /. float_of_int trials *. 1000.0 in
580580+ Printf.printf " %2d packages: %5d/%d compatible (%.1f%%), avg %.4f ms/query\n"
581581+ n !compat trials rate avg_ms
582582+ in
583583+ test_n_way 2 10000;
584584+ test_n_way 3 10000;
585585+ test_n_way 5 10000;
586586+ test_n_way 8 10000;
587587+ test_n_way 10 10000;
588588+ test_n_way 15 5000;
589589+ test_n_way 20 5000;
590590+ test_n_way 30 2000;
591591+ test_n_way 50 1000;
592592+593593+ Printf.printf "\nDone.\n"
+1010
day10/analysis/universe_compat.ml
(*
  Universe Compatibility Solver
  =============================

  Problem: Given a collection of "universes" (each being a complete dependency
  solution for one package version), and a set of desired packages, find a
  subset of universes — one per desired package — such that whenever the same
  dependency package appears in multiple selected universes, they all agree on
  its version.

  Formally:
  - A universe U is a map: package_name -> version
  - U.target is the package that U was solved for
  - Given desired packages {p1, ..., pk}, find {U1, ..., Uk} where:
    - Ui.target.name = pi for all i
    - For all i,j and all package names n in both Ui and Uj: Ui(n) = Uj(n)

  This is a Constraint Satisfaction Problem. In the worst case it reduces to
  finding a clique in a compatibility graph, which is NP-hard — but the
  structure of real package ecosystems provides exploitable constraints.
*)

(* ========================================================================= *)
(* Data Model *)
(* ========================================================================= *)
(* String-keyed persistent maps and sets; used for every name -> version
   table and name set in this file. *)
module StringMap = Map.Make(String)
module StringSet = Set.Make(String)

(** A package is identified by name and version *)
type package = {
  name : string;     (* package name *)
  version : string;  (* version string; compared only with string equality here *)
}

(** A universe is the result of solving dependencies for one target package.
    It contains the target and a map from package names to versions for
    all transitive dependencies (including the target itself). *)
type universe = {
  id : int;                  (* numeric tag; presumably unique per universe —
                                not read anywhere in this file's visible code *)
  target : package;          (* the package this universe was solved for *)
  deps : string StringMap.t; (* package_name -> version *)
}

(** Result of a compatibility search.
    NOTE(review): this shadows Stdlib's ['a result] type inside this file;
    harmless as written, but worth renaming if [Result] is ever needed here. *)
type result =
  | Compatible of universe list  (* one universe per desired package *)
  | Incompatible                 (* no conflict-free selection exists *)
(* ========================================================================= *)
(* Utility *)
(* ========================================================================= *)

(** Run [f ()], print its wall-clock duration on stdout tagged with [label],
    and return both the computed value and the elapsed time in seconds. *)
let time_it label f =
  let started = Unix.gettimeofday () in
  let value = f () in
  let elapsed = Unix.gettimeofday () -. started in
  Printf.printf "  %-40s %10.6f s\n" label elapsed;
  (value, elapsed)
(* ========================================================================= *)
(* Compatibility checking *)
(* ========================================================================= *)

(** Two universes are compatible iff every dependency name they share is
    pinned to the same version in both. *)
let universes_compatible u1 u2 =
  (* Iterate over the smaller dependency map and probe the larger one. *)
  let small, large =
    if StringMap.cardinal u2.deps < StringMap.cardinal u1.deps
    then u2.deps, u1.deps
    else u1.deps, u2.deps
  in
  StringMap.for_all (fun name ver ->
    match StringMap.find_opt name large with
    | Some other -> String.equal ver other
    | None -> true
  ) small
(** [compatible_with_merged merged u] is true iff no dependency of [u]
    clashes with a version already recorded in the merged map of the
    universes selected so far. *)
let compatible_with_merged merged u =
  let agrees name ver =
    match StringMap.find_opt name merged with
    | Some pinned -> String.equal ver pinned
    | None -> true
  in
  StringMap.for_all agrees u.deps
(** Fold [u]'s dependencies into [merged]. On a shared name the binding
    already present in [merged] wins; callers guarantee the two agree
    anyway (compatibility is checked first). *)
let merge_deps merged u =
  StringMap.fold (fun name ver acc ->
    if StringMap.mem name acc then acc else StringMap.add name ver acc
  ) u.deps merged
(* ========================================================================= *)
(* Algorithm 1: Brute Force *)
(* ========================================================================= *)
(** Exhaustively enumerate one candidate universe per desired package and
    return the first fully agreeing combination, scanning combinations in
    lexicographic order over the candidate lists.
    Complexity: O(V1 * V2 * ... * Vk * D) where Vi is the number of
    candidates for desired package i and D is the average deps size —
    simple, but exponential in the number of desired packages. *)

let solve_brute_force ~(desired : string list)
    ~(candidates : universe list StringMap.t) : result =
  let targets = Array.of_list desired in
  let k = Array.length targets in
  (* Candidate pool per desired package; a missing name yields an empty pool. *)
  let pools = Array.map (fun name ->
    match StringMap.find_opt name candidates with
    | Some us -> Array.of_list us
    | None -> [||]
  ) targets in
  if k = 0 then
    (* The empty request is trivially satisfied. *)
    Compatible []
  else if Array.exists (fun pool -> Array.length pool = 0) pools then
    Incompatible
  else begin
    (* Does [combo] (one universe per slot) contain no version clash?
       Builds the merged view incrementally, slot by slot. *)
    let combo_ok combo =
      let rec go i merged =
        if i = k then true
        else if compatible_with_merged merged combo.(i) then
          go (i + 1) (merge_deps merged combo.(i))
        else false
      in
      go 0 StringMap.empty
    in
    let combo = Array.make k pools.(0).(0) in
    let exception Found in
    (* Depth-first enumeration of all index combinations; the first full
       combination that checks out aborts the walk via [Found]. *)
    let rec enumerate depth =
      if depth = k then begin
        if combo_ok combo then raise Found
      end else
        Array.iter (fun u ->
          combo.(depth) <- u;
          enumerate (depth + 1)
        ) pools.(depth)
    in
    try enumerate 0; Incompatible
    with Found -> Compatible (Array.to_list combo)
  end
(* ========================================================================= *)
(* Algorithm 2: Backtracking with Forward Checking *)
(* ========================================================================= *)
(** Standard CSP backtracking: commit one universe at a time; after each
    commitment, drop from every remaining domain any candidate that
    conflicts with the merged assignment, and abandon the branch as soon
    as some future domain becomes empty. *)

let solve_backtrack_fc ~(desired : string list)
    ~(candidates : universe list StringMap.t) : result =
  let targets = Array.of_list desired in
  let k = Array.length targets in
  let initial_domains = Array.map (fun name ->
    match StringMap.find_opt name candidates with
    | Some us -> us
    | None -> []
  ) targets in
  if Array.exists (fun d -> d = []) initial_domains then
    Incompatible
  else begin
    (* Filter every domain past [depth] against [merged]; None if any
       future domain empties (this branch cannot succeed). *)
    let forward_check depth merged domains =
      let pruned = Array.copy domains in
      let rec prune i =
        if i >= k then Some pruned
        else begin
          pruned.(i) <- List.filter (compatible_with_merged merged) pruned.(i);
          if pruned.(i) = [] then None else prune (i + 1)
        end
      in
      prune (depth + 1)
    in
    (* Returns the chosen universes for slots [depth..k-1], in slot order. *)
    let rec assign depth merged domains =
      if depth = k then Some []
      else
        let rec try_each = function
          | [] -> None
          | u :: rest ->
            if compatible_with_merged merged u then begin
              let merged' = merge_deps merged u in
              match forward_check depth merged' domains with
              | None -> try_each rest
              | Some domains' ->
                (match assign (depth + 1) merged' domains' with
                 | Some tail -> Some (u :: tail)
                 | None -> try_each rest)
            end else
              try_each rest
        in
        try_each domains.(depth)
    in
    match assign 0 StringMap.empty initial_domains with
    | Some chosen -> Compatible chosen
    | None -> Incompatible
  end
(* ========================================================================= *)
(* Algorithm 3: Arc Consistency (AC-3) + Backtracking *)
(* ========================================================================= *)
(** Pre-process domains using AC-3 to remove values that cannot participate
    in any solution, then run backtracking. The arc consistency step
    iteratively removes a candidate from domain i if there exists no
    candidate in domain j that is compatible with it.

    NOTE(review): the fixpoint loop below re-sweeps every (i, a) pair until
    nothing changes, which is closer to AC-1 than queue-driven AC-3; it is
    correct, just potentially slower on large instances. *)

let solve_ac3_backtrack ~(desired : string list)
    ~(candidates : universe list StringMap.t) : result =
  let desired_arr = Array.of_list desired in
  let k = Array.length desired_arr in
  (* One candidate array per desired package; a missing name => empty domain. *)
  let domains = Array.map (fun pkg ->
    match StringMap.find_opt pkg candidates with
    | Some l -> Array.of_list l
    | None -> [||]
  ) desired_arr in
  if Array.exists (fun d -> Array.length d = 0) domains then
    Incompatible
  else begin
    (* Build pairwise compatibility tables:
       compat.(i).(j).(a).(b) = true iff domains.(i).(a) is compatible
       with domains.(j).(b). Only materialized for i < j; the i > j case
       is read through transposed indices below. Memory is
       O(sum over i<j of |Di| * |Dj|) — fine for small desired sets, but
       worth watching for many desired packages with large domains. *)
    let compat = Array.init k (fun i ->
      Array.init k (fun j ->
        if i >= j then [||]
        else
          Array.init (Array.length domains.(i)) (fun a ->
            Array.init (Array.length domains.(j)) (fun b ->
              universes_compatible domains.(i).(a) domains.(j).(b)
            )
          )
      )
    ) in
    (* active.(i).(a) <=> candidate a of domain i has not been pruned yet. *)
    let active = Array.init k (fun i ->
      Array.make (Array.length domains.(i)) true
    ) in
    (* Fixpoint: keep sweeping until a whole pass deactivates nothing. *)
    let changed = ref true in
    while !changed do
      changed := false;
      for i = 0 to k - 1 do
        for a = 0 to Array.length domains.(i) - 1 do
          if active.(i).(a) then begin
            (* Candidate (i, a) survives only if every other domain j still
               holds at least one active candidate compatible with it. *)
            let dominated = ref false in
            for j = 0 to k - 1 do
              if (not !dominated) && i <> j then begin
                let has_support = ref false in
                for b = 0 to Array.length domains.(j) - 1 do
                  if (not !has_support) && active.(j).(b) then begin
                    (* Read the half-materialized table in i < j orientation. *)
                    let ok = if i < j then compat.(i).(j).(a).(b)
                    else compat.(j).(i).(b).(a) in
                    if ok then has_support := true
                  end
                done;
                if not !has_support then dominated := true
              end
            done;
            if !dominated then begin
              active.(i).(a) <- false;
              changed := true
            end
          end
        done
      done
    done;
    (* If arc consistency emptied any domain, there is provably no solution. *)
    let any_empty = Array.exists (fun a ->
      not (Array.exists Fun.id a)
    ) active in
    if any_empty then Incompatible
    else begin
      (* Re-pack the surviving candidates and finish the reduced problem
         with backtracking + forward checking. *)
      let filtered = Array.init k (fun i ->
        Array.to_list domains.(i)
        |> List.filteri (fun a _u -> active.(i).(a))
      ) in
      let desired_list = Array.to_list desired_arr in
      (* NOTE(review): if [desired] contains the same name twice, the two
         entries collapse into a single StringMap key here — confirm that
         callers never pass duplicates. *)
      let cands = List.fold_left2 (fun acc pkg univs ->
        StringMap.add pkg univs acc
      ) StringMap.empty desired_list (Array.to_list filtered) in
      solve_backtrack_fc ~desired:desired_list ~candidates:cands
    end
  end
(* ========================================================================= *)
(* Algorithm 4: MRV-Ordered Backtracking *)
(* ========================================================================= *)
(** Order the desired packages so that the one with the fewest candidate
    universes is assigned first (most-constrained-variable heuristic), run
    backtracking with forward checking over the reordered domains, then map
    the solution back to the caller's original [desired] order.

    Fixes vs. previous revision:
    - an empty [desired] list used to reach [List.hd []] in the unshuffle
      step and raise [Failure "hd"]; it now returns [Compatible []],
      matching the behavior of the other solvers;
    - domain sizes are computed once up front instead of calling
      [List.length] inside the [Array.sort] comparator on every comparison. *)

let solve_greedy_indexed ~(desired : string list)
    ~(candidates : universe list StringMap.t) : result =
  let desired_arr = Array.of_list desired in
  let k = Array.length desired_arr in
  (* Candidate universes per desired package; a missing name means an
     empty domain and therefore no possible solution. *)
  let domain_lists = Array.map (fun pkg ->
    match StringMap.find_opt pkg candidates with
    | Some l -> l
    | None -> []
  ) desired_arr in
  if Array.exists (fun d -> d = []) domain_lists then
    Incompatible
  else begin
    (* Sort slot indices by domain size ascending (most constrained first). *)
    let sizes = Array.map List.length domain_lists in
    let order = Array.init k Fun.id in
    Array.sort (fun i j -> compare sizes.(i) sizes.(j)) order;
    let sorted_domains = Array.map (fun i -> domain_lists.(i)) order in
    (* Backtracking with forward checking over the MRV-ordered domains.
       Returns the selected universes in sorted (MRV) slot order. *)
    let rec search depth merged domains =
      if depth = k then Some []
      else begin
        let try_candidates = List.to_seq domains.(depth) in
        Seq.fold_left (fun acc u ->
          match acc with
          | Some _ -> acc (* a solution was already found on this level *)
          | None ->
            if compatible_with_merged merged u then begin
              let merged' = merge_deps merged u in
              (* Forward check: drop future candidates that now conflict;
                 abandon this choice if any future domain empties. *)
              let domains' = Array.copy domains in
              let pruned_ok = ref true in
              for i = depth + 1 to k - 1 do
                if !pruned_ok then begin
                  domains'.(i) <- List.filter
                    (fun c -> compatible_with_merged merged' c) domains'.(i);
                  if domains'.(i) = [] then pruned_ok := false
                end
              done;
              if !pruned_ok then
                match search (depth + 1) merged' domains' with
                | Some rest -> Some (u :: rest)
                | None -> None
              else None
            end else None
        ) None try_candidates
      end
    in
    match search 0 StringMap.empty sorted_domains with
    | Some [] ->
      (* Only reachable when k = 0: the empty request is trivially satisfied. *)
      Compatible []
    | Some l ->
      (* Unshuffle: element [depth] of [l] belongs at original position
         [order.(depth)]. [l] is non-empty here, so the seed value is safe. *)
      let result_arr = Array.make k (List.hd l) in
      List.iteri (fun depth u -> result_arr.(order.(depth)) <- u) l;
      Compatible (Array.to_list result_arr)
    | None -> Incompatible
  end
(* ========================================================================= *)
(* Algorithm 5: Signature-Based Clustering *)
(* ========================================================================= *)
(** Key insight: two universes are compatible iff they agree on all shared
    package versions. We can compute a "signature" for each universe on a
    set of discriminating packages, and only need to check compatibility
    between universes with matching signatures on those packages.

    We identify the "pivotal" packages — those that appear in candidates for
    multiple desired packages with different versions — and use them as
    the discriminating set. This dramatically reduces the search space. *)

(* NOTE(review): IntSet is not referenced anywhere in this function; it may
   be intended for a later algorithm in this file — confirm before removing. *)
module IntSet = Set.Make(Int)

let solve_signature ~(desired : string list)
    ~(candidates : universe list StringMap.t) : result =
  let desired_arr = Array.of_list desired in
  let k = Array.length desired_arr in
  (* Candidate universes per desired package; missing name => empty domain. *)
  let domain_lists = Array.map (fun pkg ->
    match StringMap.find_opt pkg candidates with
    | Some l -> l
    | None -> []
  ) desired_arr in
  if Array.exists (fun d -> d = []) domain_lists then
    Incompatible
  else begin
    (* Collect, across ALL candidate universes of ALL desired packages,
       the set of versions seen for every dependency name. *)
    let pkg_versions : StringSet.t StringMap.t ref = ref StringMap.empty in
    Array.iter (fun univs ->
      List.iter (fun u ->
        StringMap.iter (fun name ver ->
          let existing = match StringMap.find_opt name !pkg_versions with
            | Some s -> s | None -> StringSet.empty in
          pkg_versions := StringMap.add name
            (StringSet.add ver existing) !pkg_versions
        ) u.deps
      ) univs
    ) domain_lists;
    (* Pivotal = dependency names observed with more than one version;
       only these can ever cause a conflict between candidates. *)
    let pivotal = StringMap.fold (fun name versions acc ->
      if StringSet.cardinal versions > 1 then StringSet.add name acc
      else acc
    ) !pkg_versions StringSet.empty in
    (* A universe's signature is its deps map restricted to pivotal names. *)
    let signature u =
      StringMap.filter (fun name _ver -> StringSet.mem name pivotal) u.deps
    in
    (* Canonical string form of a signature, used as a Hashtbl key.
       Bindings come out of StringMap sorted, so equal signatures always
       serialize identically. *)
    let sig_to_string sig_map =
      StringMap.bindings sig_map
      |> List.map (fun (k, v) -> k ^ "=" ^ v)
      |> String.concat ","
    in
    (* Group each domain's candidates by signature; every universe in one
       group has the same pivotal-package versions. *)
    let grouped_domains = Array.map (fun univs ->
      let tbl = Hashtbl.create 16 in
      List.iter (fun u ->
        let s = sig_to_string (signature u) in
        let existing = match Hashtbl.find_opt tbl s with
          | Some l -> l | None -> [] in
        Hashtbl.replace tbl s (u :: existing)
      ) univs;
      tbl
    ) domain_lists in
    (* Depth-first search over signature groups. [sig_constraint] is the
       union of the signatures committed so far; a whole group can be
       skipped with one check against it.
       NOTE(review): Hashtbl.iter visits groups in an unspecified order, so
       WHICH solution is returned (when several exist) is not guaranteed to
       be stable across OCaml versions/runs — confirm callers don't rely on
       a particular selection. *)
    let rec search depth merged sig_constraint domains =
      if depth = k then Some []
      else begin
        let tbl = domains.(depth) in
        let found = ref None in
        Hashtbl.iter (fun sig_str univs ->
          if !found <> None then ()
          else begin
            (* All members of a group share one signature; test the head's. *)
            let sig_map = signature (List.hd univs) in
            let sig_ok = StringMap.for_all (fun name ver ->
              match StringMap.find_opt name sig_constraint with
              | None -> true
              | Some ver' -> String.equal ver ver'
            ) sig_map in
            ignore sig_str; (* key unused; kept to silence unused-var warning *)
            if sig_ok then begin
              (* Group passed the quick filter: try its members in full. *)
              List.iter (fun u ->
                if !found <> None then ()
                else if compatible_with_merged merged u then begin
                  let merged' = merge_deps merged u in
                  (* Extend the constraint; on overlap keep the existing
                     pin (the two agree anyway after the check above). *)
                  let sig_constraint' = StringMap.union
                    (fun _k v1 _v2 -> Some v1) sig_constraint (signature u) in
                  match search (depth + 1) merged' sig_constraint' domains with
                  | Some rest -> found := Some (u :: rest)
                  | None -> ()
                end
              ) univs
            end
          end
        ) tbl;
        !found
      end
    in
    match search 0 StringMap.empty StringMap.empty grouped_domains with
    | Some l -> Compatible l
    | None -> Incompatible
  end
467467+468468+(* ========================================================================= *)
469469+(* Algorithm 6: Dependency Fingerprint Hashing *)
470470+(* ========================================================================= *)
471471+(** Pre-compute a hash for each universe based on its dependency versions.
472472+ Group universes into equivalence classes by their hash on shared packages.
473473+ This enables O(1) lookups to find compatible universe groups.
474474+475475+ For each pair of desired packages, compute the set of packages that
476476+ appear in both their candidate universes. Hash each candidate on just
477477+ those shared packages. Only candidates with matching hashes on shared
478478+ packages can be compatible. *)
(* NOTE(review): the previous version of this function built per-pair
   shared-name sets and fingerprint hash tables and then never used them:
   every lookup was discarded with [ignore], the fold that referenced a
   previously selected universe iterated over the empty list, and the
   actual pruning was done with [compatible_with_merged].  Exact-fingerprint
   matching would also be unsound as a filter (a candidate lacking one of
   the shared packages hashes differently yet conflicts with nothing), so
   the dead machinery is removed rather than wired in.  Observable
   behavior — search order, result, and [Compatible]/[Incompatible]
   verdicts — is unchanged; only the wasted O(k^2 * |domains|) setup work
   is gone. *)
let solve_fingerprint ~(desired : string list)
    ~(candidates : universe list StringMap.t) : result =
  let desired_arr = Array.of_list desired in
  let k = Array.length desired_arr in
  (* Candidate universes for each desired package; a package with no
     candidates makes the whole request unsatisfiable. *)
  let domain_lists = Array.map (fun pkg ->
    match StringMap.find_opt pkg candidates with
    | Some l -> l
    | None -> []
  ) desired_arr in
  if Array.exists (fun d -> d = []) domain_lists then
    Incompatible
  else begin
    (* Depth-first search over the domains in order.  [merged] is the
       union of dependency constraints of the universes chosen so far;
       [selected] holds those universes in reverse order. *)
    let rec search depth merged selected =
      if depth = k then Some (List.rev selected)
      else begin
        (* Domain 0 is taken as-is; later domains are pre-filtered
           against the constraints accumulated so far. *)
        let candidates_for_depth =
          if depth = 0 then domain_lists.(0)
          else List.filter (fun u -> compatible_with_merged merged u)
                 domain_lists.(depth)
        in
        let found = ref None in
        List.iter (fun u ->
          if !found = None then begin
            let merged' = merge_deps merged u in
            (* Forward check: every remaining domain must still contain at
               least one universe compatible with the extended constraint
               set, otherwise this choice cannot lead to a solution. *)
            let viable = ref true in
            for j = depth + 1 to k - 1 do
              if !viable
                 && not (List.exists
                           (fun c -> compatible_with_merged merged' c)
                           domain_lists.(j))
              then viable := false
            done;
            if !viable then
              match search (depth + 1) merged' (u :: selected) with
              | Some result -> found := Some result
              | None -> ()
          end
        ) candidates_for_depth;
        !found
      end
    in
    match search 0 StringMap.empty [] with
    | Some l -> Compatible l
    | None -> Incompatible
  end
592592+593593+(* ========================================================================= *)
594594+(* Synthetic Data Generation *)
595595+(* ========================================================================= *)
(* Monotonically increasing counter used to give every generated
   universe a unique [id]. *)
let next_id = ref 0

(* Return the current counter value and advance it by one. *)
let fresh_id () =
  let current = !next_id in
  next_id := current + 1;
  current
(** Generate a synthetic universe for a given target package.
    [n_deps]: how many dependencies it has
    [dep_pool]: pool of available (name, version list) to pick from
    [ocaml_version]: which OCaml version this universe uses

    Fix: the sampling loop previously tested [!added < pool_size].  If a
    pool entry's name collides with a dep already present (e.g. the
    target itself or "ocaml"), [added] can never reach that bound even
    after every index has been drawn, and the loop spins forever.  We now
    stop once every pool index has been tried.  In the non-pathological
    case (distinct names, n_deps < pool_size) the draw sequence and
    result are identical to before. *)
let gen_universe ~target_name ~target_version ~ocaml_version
    ~(dep_pool : (string * string list) array) ~n_deps ~rng =
  let id = fresh_id () in
  (* Every universe contains its own target and the OCaml compiler. *)
  let deps = ref (StringMap.singleton target_name target_version
                  |> StringMap.add "ocaml" ocaml_version) in
  (* Pick up to n_deps distinct random dependencies from the pool. *)
  let pool_size = Array.length dep_pool in
  let used = Hashtbl.create n_deps in
  let added = ref 0 in
  while !added < n_deps && Hashtbl.length used < pool_size do
    let idx = Random.State.int rng pool_size in
    if not (Hashtbl.mem used idx) then begin
      Hashtbl.add used idx true;
      let (name, versions) = dep_pool.(idx) in
      if not (StringMap.mem name !deps) then begin
        (* Correlate the chosen version with the OCaml major version to
           create realistic clustering: 4.x universes take the oldest
           version, 5.x universes the newest. *)
        let ver_idx = if String.get ocaml_version 0 = '4' then 0
          else (List.length versions - 1) in
        let ver_idx = min ver_idx (List.length versions - 1) in
        let ver = List.nth versions ver_idx in
        deps := StringMap.add name ver !deps;
        incr added
      end
    end
  done;
  { id; target = { name = target_name; version = target_version }; deps = !deps }
(** Generate a full test scenario.
    [n_packages]: number of distinct packages
    [versions_per_pkg]: versions each package has
    [n_shared_deps]: number of shared dependency packages in the pool
    [deps_per_universe]: how many deps each universe picks from the pool
    [n_ocaml_versions]: how many OCaml versions exist

    NOTE(review): all randomness flows through the single [rng], so the
    result depends on the order in which [Array.init]/[List.init] apply
    their function — the stdlib does not guarantee that order.  Confirm
    determinism matters before relying on a fixed seed reproducing the
    exact same scenario across OCaml versions. *)
let gen_scenario ~n_packages ~versions_per_pkg ~n_shared_deps
    ~deps_per_universe ~n_ocaml_versions ~rng =
  (* Create the shared dependency pool: each "dep-NNNN" gets 1..4
     versions named "1.0.0" .. "4.0.0". *)
  let dep_pool = Array.init n_shared_deps (fun i ->
    let name = Printf.sprintf "dep-%04d" i in
    let versions = List.init (1 + Random.State.int rng 4) (fun v ->
      Printf.sprintf "%d.0.0" (v + 1)
    ) in
    (name, versions)
  ) in
  (* First half of the versions are 4.14.x, the rest 5.x.0. *)
  let ocaml_versions = Array.init n_ocaml_versions (fun i ->
    if i < n_ocaml_versions / 2 then Printf.sprintf "4.14.%d" i
    else Printf.sprintf "5.%d.0" (i - n_ocaml_versions / 2)
  ) in
  (* Generate universes for each package *)
  let all_candidates = ref StringMap.empty in
  let all_packages = Array.init n_packages (fun i ->
    Printf.sprintf "pkg-%04d" i
  ) in
  Array.iter (fun pkg_name ->
    let univs = List.init versions_per_pkg (fun v ->
      let target_version = Printf.sprintf "%d.0.0" (v + 1) in
      (* Each version might work with different OCaml versions: 1 or 2
         (capped by n_ocaml_versions), rotating through the version
         array so neighbours overlap. *)
      let n_ocamls = 1 + Random.State.int rng (min 2 n_ocaml_versions) in
      List.init n_ocamls (fun oi ->
        let ocaml_idx = (v + oi) mod n_ocaml_versions in
        let ocaml_version = ocaml_versions.(ocaml_idx) in
        gen_universe ~target_name:pkg_name ~target_version ~ocaml_version
          ~dep_pool ~n_deps:deps_per_universe ~rng
      )
    ) |> List.flatten in
    all_candidates := StringMap.add pkg_name univs !all_candidates
  ) all_packages;
  (all_packages, !all_candidates)
676676+677677+(* ========================================================================= *)
678678+(* Test Cases *)
679679+(* ========================================================================= *)
(** Hand-crafted test: the example from the problem statement.
    a.1 and b.1 share the OCaml 4.14 universe while a.2 requires
    OCaml 5.0, so "a and b" must resolve to a.1 + b.1. *)
let test_basic () =
  Printf.printf "\n=== Test: Basic (problem statement example) ===\n";
  next_id := 0;
  (* Small constructor to cut down on record-literal noise; ids are
     allocated in the same order as before. *)
  let mk name version deps =
    { id = fresh_id ();
      target = { name; version };
      deps = StringMap.of_list deps }
  in
  let u_a1 = mk "a" "1" [("a", "1"); ("ocaml", "4.14")] in
  let u_a2 = mk "a" "2" [("a", "2"); ("ocaml", "5.0")] in
  let u_b1 = mk "b" "1" [("b", "1"); ("ocaml", "4.14")] in
  let candidates = StringMap.of_list [
    ("a", [u_a1; u_a2]);
    ("b", [u_b1]);
  ] in
  let desired = ["a"; "b"] in
  let solvers = [
    ("Brute force", fun () -> solve_brute_force ~desired ~candidates);
    ("Backtrack+FC", fun () -> solve_backtrack_fc ~desired ~candidates);
    ("AC-3+Backtrack", fun () -> solve_ac3_backtrack ~desired ~candidates);
    ("Greedy+MRV", fun () -> solve_greedy_indexed ~desired ~candidates);
    ("Signature", fun () -> solve_signature ~desired ~candidates);
    ("Fingerprint", fun () -> solve_fingerprint ~desired ~candidates);
  ] in
  (* Run each solver and print its verdict. *)
  List.iter (fun (name, solver) ->
    let (result, _elapsed) = time_it name solver in
    match result with
    | Compatible univs ->
      Printf.printf " -> Compatible: %s\n"
        (String.concat ", " (List.map (fun u ->
          Printf.sprintf "%s.%s" u.target.name u.target.version) univs))
    | Incompatible ->
      Printf.printf " -> Incompatible\n"
  ) solvers
(** Hand-crafted test: the extended example where c.1 appears in
    multiple universes (one per OCaml version it builds with). *)
let test_extended () =
  Printf.printf "\n=== Test: Extended (c in multiple universes) ===\n";
  next_id := 0;
  (* Constructor helper; ids are allocated in the same order as before. *)
  let mk name version deps =
    { id = fresh_id ();
      target = { name; version };
      deps = StringMap.of_list deps }
  in
  let u_a1 = mk "a" "1" [("a", "1"); ("ocaml", "4.14")] in
  let u_a2 = mk "a" "2" [("a", "2"); ("ocaml", "5.0")] in
  let u_b1 = mk "b" "1" [("b", "1"); ("ocaml", "4.14")] in
  (* c.1 can build with any OCaml, so appears in two universes *)
  let u_c1_414 = mk "c" "1" [("c", "1"); ("ocaml", "4.14")] in
  let u_c1_50 = mk "c" "1" [("c", "1"); ("ocaml", "5.0")] in
  (* d depends on c and a, solved with OCaml 5 *)
  let u_d1 = mk "d" "1" [("d", "1"); ("a", "2"); ("c", "1"); ("ocaml", "5.0")] in
  (* e depends on a, b, c, solved with OCaml 4.14 *)
  let u_e1 = mk "e" "1" [("e", "1"); ("a", "1"); ("b", "1"); ("c", "1"); ("ocaml", "4.14")] in
  let candidates = StringMap.of_list [
    ("a", [u_a1; u_a2]);
    ("b", [u_b1]);
    ("c", [u_c1_414; u_c1_50]);
    ("d", [u_d1]);
    ("e", [u_e1]);
  ] in
  (* Run every solver on [desired] and print each verdict; this replaces
     the three copy-pasted solver lists of the original. *)
  let run_all desired =
    List.iter (fun (name, solver) ->
      let (result, _) = time_it name solver in
      match result with
      | Compatible univs ->
        Printf.printf " -> Compatible: %s\n"
          (String.concat ", " (List.map (fun u ->
            Printf.sprintf "%s.%s" u.target.name u.target.version) univs))
      | Incompatible -> Printf.printf " -> Incompatible\n"
    ) [
      ("Brute force", fun () -> solve_brute_force ~desired ~candidates);
      ("Backtrack+FC", fun () -> solve_backtrack_fc ~desired ~candidates);
      ("AC-3+Backtrack", fun () -> solve_ac3_backtrack ~desired ~candidates);
      ("Greedy+MRV", fun () -> solve_greedy_indexed ~desired ~candidates);
      ("Signature", fun () -> solve_signature ~desired ~candidates);
      ("Fingerprint", fun () -> solve_fingerprint ~desired ~candidates);
    ]
  in
  Printf.printf " Subtest: want d and e (should be incompatible - OCaml conflict)\n";
  run_all ["d"; "e"];
  Printf.printf " Subtest: want a and b (should be compatible via a.1)\n";
  run_all ["a"; "b"];
  Printf.printf " Subtest: want a, b, and c (should be compatible via OCaml 4.14)\n";
  run_all ["a"; "b"; "c"]
(** Test with no solution possible: x and y pin different versions of
    the same "shared" dependency, so no combination can be merged. *)
let test_impossible () =
  Printf.printf "\n=== Test: Impossible (no compatible combination) ===\n";
  next_id := 0;
  let mk name version deps =
    { id = fresh_id ();
      target = { name; version };
      deps = StringMap.of_list deps }
  in
  let u_x1 = mk "x" "1" [("x", "1"); ("shared", "1"); ("ocaml", "5.0")] in
  let u_y1 = mk "y" "1" [("y", "1"); ("shared", "2"); ("ocaml", "5.0")] in
  let candidates = StringMap.of_list [
    ("x", [u_x1]);
    ("y", [u_y1]);
  ] in
  let desired = ["x"; "y"] in
  (* Every solver must report Incompatible here. *)
  List.iter (fun (name, solver) ->
    let (result, _) = time_it name solver in
    match result with
    | Compatible _ -> Printf.printf " -> ERROR: should be incompatible!\n"
    | Incompatible -> Printf.printf " -> Incompatible (correct)\n"
  ) [
    ("Brute force", fun () -> solve_brute_force ~desired ~candidates);
    ("Backtrack+FC", fun () -> solve_backtrack_fc ~desired ~candidates);
    ("AC-3+Backtrack", fun () -> solve_ac3_backtrack ~desired ~candidates);
    ("Greedy+MRV", fun () -> solve_greedy_indexed ~desired ~candidates);
    ("Signature", fun () -> solve_signature ~desired ~candidates);
    ("Fingerprint", fun () -> solve_fingerprint ~desired ~candidates);
  ]
(** Validate that all solvers agree: runs every solver (via [time_it],
    so each is printed/timed once) and returns [true] iff they all
    report Compatible or all report Incompatible.  On disagreement the
    per-solver verdicts are printed.  [_desired] and [_candidates] are
    unused; the closures in [solvers] already capture them. *)
let validate_agreement solvers _desired _candidates label =
  let results =
    List.map
      (fun (name, solver) ->
        let (r, _elapsed) = time_it name solver in
        (name, r))
      solvers
  in
  let is_compat = function Compatible _ -> true | Incompatible -> false in
  let verdicts = List.map (fun (_, r) -> is_compat r) results in
  let unanimous =
    List.for_all (fun v -> v) verdicts
    || List.for_all (fun v -> not v) verdicts
  in
  if unanimous then true
  else begin
    Printf.printf " WARNING: Disagreement on %s!\n" label;
    List.iter (fun (name, result) ->
      Printf.printf " %s: %s\n" name
        (match result with
         | Compatible _ -> "Compatible"
         | Incompatible -> "Incompatible")
    ) results;
    false
  end
855855+856856+(* ========================================================================= *)
857857+(* Benchmarks *)
858858+(* ========================================================================= *)
(** Run one benchmark configuration: generate a synthetic scenario with a
    fixed seed, then run [n_trials] random queries through every solver,
    checking that all solvers agree and accumulating per-solver
    wall-clock time for a final summary table. *)
let run_benchmark ~label ~n_packages ~versions_per_pkg ~n_shared_deps
    ~deps_per_universe ~n_ocaml_versions ~n_desired ~n_trials =
  Printf.printf "\n=== Benchmark: %s ===\n" label;
  Printf.printf " Config: %d packages, %d versions each, %d shared deps, \
 %d deps/universe, %d OCaml versions\n"
    n_packages versions_per_pkg n_shared_deps deps_per_universe n_ocaml_versions;
  Printf.printf " Query: %d desired packages, %d trials\n" n_desired n_trials;
  (* Fixed seed so every run benchmarks the same scenario. *)
  let rng = Random.State.make [| 42 |] in
  let (all_packages, candidates) = gen_scenario
    ~n_packages ~versions_per_pkg ~n_shared_deps
    ~deps_per_universe ~n_ocaml_versions ~rng in
  let total_universes = StringMap.fold (fun _k v acc ->
    acc + List.length v) candidates 0 in
  Printf.printf " Total universes: %d\n" total_universes;
  (* Run multiple trials with random desired sets *)
  let solver_times = Hashtbl.create 8 in
  let solver_names = [
    "Brute force"; "Backtrack+FC"; "AC-3+Backtrack";
    "Greedy+MRV"; "Signature"; "Fingerprint"
  ] in
  List.iter (fun name -> Hashtbl.replace solver_times name 0.0) solver_names;
  let agreements = ref 0 in
  let total = ref 0 in
  (* NOTE(review): [compat_count] is only incremented on trial 0 (once per
     solver) and is never read afterwards — effectively dead state. *)
  let compat_count = ref 0 in
  for trial = 0 to n_trials - 1 do
    (* Pick n_desired random packages (a set, so duplicates re-draw). *)
    let desired = ref StringSet.empty in
    while StringSet.cardinal !desired < n_desired do
      let idx = Random.State.int rng (Array.length all_packages) in
      desired := StringSet.add all_packages.(idx) !desired
    done;
    let desired_list = StringSet.elements !desired in
    if trial = 0 then
      Printf.printf " Sample desired: %s\n" (String.concat ", " desired_list);
    let solvers = [
      ("Brute force", fun () -> solve_brute_force ~desired:desired_list ~candidates);
      ("Backtrack+FC", fun () -> solve_backtrack_fc ~desired:desired_list ~candidates);
      ("AC-3+Backtrack", fun () -> solve_ac3_backtrack ~desired:desired_list ~candidates);
      ("Greedy+MRV", fun () -> solve_greedy_indexed ~desired:desired_list ~candidates);
      ("Signature", fun () -> solve_signature ~desired:desired_list ~candidates);
      ("Fingerprint", fun () -> solve_fingerprint ~desired:desired_list ~candidates);
    ] in
    Printf.printf " Trial %d:\n" trial;
    (* First pass: agreement check; the elapsed times measured inside
       validate_agreement are discarded. *)
    let ok = validate_agreement solvers desired_list candidates
      (Printf.sprintf "trial %d" trial) in
    if ok then incr agreements;
    incr total;
    (* Second pass: run each solver again and accumulate its elapsed
       time.  Only this pass feeds the timing summary below, so each
       solver contributes exactly one timed run per trial. *)
    List.iter (fun (name, solver) ->
      let (result, elapsed) = time_it name solver in
      let prev = Hashtbl.find solver_times name in
      Hashtbl.replace solver_times name (prev +. elapsed);
      if trial = 0 then begin
        match result with
        | Compatible _ -> incr compat_count
        | Incompatible -> ()
      end
    ) solvers
  done;
  Printf.printf "\n --- Summary for %s ---\n" label;
  Printf.printf " Agreement: %d/%d trials\n" !agreements !total;
  Printf.printf " %-40s %12s %12s\n" "Algorithm" "Total (s)" "Avg (ms)";
  Printf.printf " %s\n" (String.make 66 '-');
  List.iter (fun name ->
    let total_time = Hashtbl.find solver_times name in
    (* One timed run per trial accumulates into total_time, so dividing
       by n_trials yields the per-query average in milliseconds. *)
    let avg_ms = total_time /. (float_of_int n_trials) *. 1000.0 in
    Printf.printf " %-40s %12.6f %12.4f\n" name total_time avg_ms
  ) solver_names
929929+930930+(* ========================================================================= *)
931931+(* Stress test: large scale *)
932932+(* ========================================================================= *)
(** Entry point: run the whole analysis — correctness tests first, then
    benchmark phases of increasing scale, ending with pathological
    configurations.  Each phase is a straight sequence of
    [run_benchmark] calls with hand-picked parameters. *)
let run_scale_test () =
  Printf.printf "\n\n";
  Printf.printf "╔══════════════════════════════════════════════════════════════╗\n";
  Printf.printf "║ UNIVERSE COMPATIBILITY SOLVER ANALYSIS ║\n";
  Printf.printf "╚══════════════════════════════════════════════════════════════╝\n";

  (* Correctness tests *)
  Printf.printf "\n\n--- PHASE 1: Correctness Tests ---\n";
  test_basic ();
  test_extended ();
  test_impossible ();

  (* Small benchmark *)
  Printf.printf "\n\n--- PHASE 2: Small Scale Benchmarks ---\n";
  run_benchmark ~label:"Tiny (10 pkgs, 2 desired)"
    ~n_packages:10 ~versions_per_pkg:3 ~n_shared_deps:20
    ~deps_per_universe:8 ~n_ocaml_versions:4 ~n_desired:2 ~n_trials:5;

  run_benchmark ~label:"Small (50 pkgs, 3 desired)"
    ~n_packages:50 ~versions_per_pkg:5 ~n_shared_deps:50
    ~deps_per_universe:15 ~n_ocaml_versions:4 ~n_desired:3 ~n_trials:5;

  (* Medium benchmark *)
  Printf.printf "\n\n--- PHASE 3: Medium Scale Benchmarks ---\n";
  run_benchmark ~label:"Medium (200 pkgs, 3 desired)"
    ~n_packages:200 ~versions_per_pkg:5 ~n_shared_deps:100
    ~deps_per_universe:25 ~n_ocaml_versions:6 ~n_desired:3 ~n_trials:5;

  run_benchmark ~label:"Medium (200 pkgs, 5 desired)"
    ~n_packages:200 ~versions_per_pkg:5 ~n_shared_deps:100
    ~deps_per_universe:25 ~n_ocaml_versions:6 ~n_desired:5 ~n_trials:5;

  (* Large benchmark *)
  Printf.printf "\n\n--- PHASE 4: Large Scale Benchmarks ---\n";
  run_benchmark ~label:"Large (500 pkgs, 3 desired)"
    ~n_packages:500 ~versions_per_pkg:8 ~n_shared_deps:200
    ~deps_per_universe:40 ~n_ocaml_versions:6 ~n_desired:3 ~n_trials:5;

  run_benchmark ~label:"Large (500 pkgs, 5 desired)"
    ~n_packages:500 ~versions_per_pkg:8 ~n_shared_deps:200
    ~deps_per_universe:40 ~n_ocaml_versions:6 ~n_desired:5 ~n_trials:5;

  run_benchmark ~label:"Large (500 pkgs, 8 desired)"
    ~n_packages:500 ~versions_per_pkg:8 ~n_shared_deps:200
    ~deps_per_universe:40 ~n_ocaml_versions:6 ~n_desired:8 ~n_trials:3;

  (* Very large benchmark - approaching production scale *)
  Printf.printf "\n\n--- PHASE 5: Production Scale Benchmarks ---\n";
  run_benchmark ~label:"XL (2000 pkgs, 3 desired)"
    ~n_packages:2000 ~versions_per_pkg:8 ~n_shared_deps:500
    ~deps_per_universe:60 ~n_ocaml_versions:8 ~n_desired:3 ~n_trials:3;

  run_benchmark ~label:"XL (2000 pkgs, 5 desired)"
    ~n_packages:2000 ~versions_per_pkg:8 ~n_shared_deps:500
    ~deps_per_universe:60 ~n_ocaml_versions:8 ~n_desired:5 ~n_trials:3;

  run_benchmark ~label:"XL (2000 pkgs, 10 desired)"
    ~n_packages:2000 ~versions_per_pkg:8 ~n_shared_deps:500
    ~deps_per_universe:60 ~n_ocaml_versions:8 ~n_desired:10 ~n_trials:3;

  (* Pathological: many versions, high conflict, incompatible *)
  Printf.printf "\n\n--- PHASE 6: Pathological Cases ---\n";
  run_benchmark ~label:"Pathological: many versions (20 ver, 5 desired)"
    ~n_packages:100 ~versions_per_pkg:20 ~n_shared_deps:80
    ~deps_per_universe:30 ~n_ocaml_versions:10 ~n_desired:5 ~n_trials:3;

  run_benchmark ~label:"Pathological: many versions (20 ver, 10 desired)"
    ~n_packages:100 ~versions_per_pkg:20 ~n_shared_deps:80
    ~deps_per_universe:30 ~n_ocaml_versions:10 ~n_desired:10 ~n_trials:3;

  run_benchmark ~label:"Extreme: high deps overlap (2000 pkg, 100 shared, 80 deps)"
    ~n_packages:2000 ~versions_per_pkg:8 ~n_shared_deps:100
    ~deps_per_universe:80 ~n_ocaml_versions:8 ~n_desired:5 ~n_trials:3;

  Printf.printf "\n\nDone.\n"

let () = run_scale_test ()
+126
day10/bin/blessing.ml
···11+(** Pre-computed blessing system for package documentation.
22+33+ Given solutions for multiple target packages, determines which universe
44+ (dependency set) is "blessed" for each (package, version) pair.
55+66+ Heuristic (from ocaml-docs-ci):
77+ 1. Maximize deps_count: prefer universes with more dependencies
88+ (favors optional deps being resolved → richer documentation)
99+ 2. Maximize revdeps_count: prefer universes where this package has
1010+ more reverse dependencies (stability: changing blessings cascades)
1111+1212+ The result is a per-target blessing map: for each package in a target's
1313+ solution, whether that package is blessed in that solution's universe. *)
(** Hash a set of transitive dependencies to produce a universe
    identifier.  [OpamPackage.Set.elements] yields the packages in a
    fixed (set) order, so equal sets always produce equal digests. *)
let universe_hash_of_deps deps =
  let names =
    List.map OpamPackage.to_string (OpamPackage.Set.elements deps)
  in
  Digest.to_hex (Digest.string (String.concat "\n" names))
(** Compute blessing maps for a set of solved targets.

    Input: list of (target_package, transitive_deps_map) where
    transitive_deps_map maps each package in the solution to its full
    transitive dependency set.

    Output: list of (target_package, blessing_map) where blessing_map
    maps each package to [true] if blessed in this solution, [false] otherwise. *)
let compute_blessings
    (solutions : (OpamPackage.t * OpamPackage.Set.t OpamPackage.Map.t) list) =
  (* Step 1: Compute revdeps counts across all solutions.
     For each package P, count how many packages across all solutions
     have P as a transitive dependency.  Note: this counts occurrences
     with multiplicity — a package appearing under several targets is
     counted once per occurrence, not per distinct reverse dependency. *)
  let revdeps_counts : (OpamPackage.t, int) Hashtbl.t = Hashtbl.create 1000 in
  List.iter (fun (_target, trans_deps) ->
    OpamPackage.Map.iter (fun _pkg deps ->
      OpamPackage.Set.iter (fun dep ->
        let c = try Hashtbl.find revdeps_counts dep with Not_found -> 0 in
        Hashtbl.replace revdeps_counts dep (c + 1)
      ) deps
    ) trans_deps
  ) solutions;

  (* Step 2: For each unique OpamPackage.t, collect all distinct universes
     it appears in, along with their quality metrics:
     (universe hash, deps count, revdeps count). *)
  let pkg_universes : (OpamPackage.t, (string * int * int) list) Hashtbl.t =
    Hashtbl.create 1000
  in
  List.iter (fun (_target, trans_deps) ->
    OpamPackage.Map.iter (fun pkg deps ->
      let uhash = universe_hash_of_deps deps in
      let deps_count = OpamPackage.Set.cardinal deps in
      let revdeps_count =
        try Hashtbl.find revdeps_counts pkg with Not_found -> 0
      in
      let existing =
        try Hashtbl.find pkg_universes pkg with Not_found -> []
      in
      (* Only add if this universe hash is new for this package *)
      if not (List.exists (fun (h, _, _) -> String.equal h uhash) existing) then
        Hashtbl.replace pkg_universes pkg
          ((uhash, deps_count, revdeps_count) :: existing)
    ) trans_deps
  ) solutions;

  (* Step 3: For each package, pick the best universe.
     Primary: maximize deps_count. Secondary: maximize revdeps_count.
     [entries] is non-empty by construction (a key is only present once
     at least one entry was consed on), so List.hd/List.tl are safe. *)
  let blessed_universe : (OpamPackage.t, string) Hashtbl.t =
    Hashtbl.create 1000
  in
  Hashtbl.iter (fun pkg entries ->
    let best_hash, _, _ =
      List.fold_left
        (fun ((_, bdc, brc) as best) ((_, dc, rc) as entry) ->
          if dc > bdc || (dc = bdc && rc > brc) then entry else best)
        (List.hd entries) (List.tl entries)
    in
    Hashtbl.replace blessed_universe pkg best_hash
  ) pkg_universes;

  (* Step 4: For each target, generate a blessing map.
     A package is blessed if its universe in this solution matches
     the globally-chosen best universe.  Hashtbl.find cannot raise here:
     every (pkg, deps) pair seen now was registered in step 2. *)
  List.map (fun (target, trans_deps) ->
    let map =
      OpamPackage.Map.mapi (fun pkg deps ->
        let uhash = universe_hash_of_deps deps in
        let blessed_uhash = Hashtbl.find blessed_universe pkg in
        String.equal uhash blessed_uhash
      ) trans_deps
    in
    (target, map)
  ) solutions
(** Look up whether a package is blessed in the given map.
    Packages absent from the map default to not blessed. *)
let is_blessed map pkg =
  Option.value ~default:false (OpamPackage.Map.find_opt pkg map)
(** Save a blessing map to a JSON file.
    Format: {"package.version": true/false, ...}
    [rev_map] over the ascending bindings reproduces the same key order
    the original fold-with-cons emitted (descending). *)
let save_blessed_map filename map =
  let entry_of_binding (pkg, blessed) =
    (OpamPackage.to_string pkg, `Bool blessed)
  in
  let entries = List.rev_map entry_of_binding (OpamPackage.Map.bindings map) in
  Yojson.Safe.to_file filename (`Assoc entries)
(** Load a blessing map from a JSON file.  Raises [Failure] when the
    top-level value is not an object; a non-boolean entry value raises
    Yojson's [Type_error]. *)
let load_blessed_map filename =
  match Yojson.Safe.from_file filename with
  | `Assoc entries ->
    let add map (pkg_str, v) =
      OpamPackage.Map.add
        (OpamPackage.of_string pkg_str)
        (Yojson.Safe.Util.to_bool v)
        map
    in
    List.fold_left add OpamPackage.Map.empty entries
  | _ -> failwith "Invalid blessed map JSON: expected object"
+272
day10/bin/combine_docs.ml
···11+(** Combine documentation layers using overlayfs.
22+33+ Creates a unified view of all documentation by stacking the prep/
44+ directories from all layers with successful documentation. *)
type doc_layer = {
  pkg : OpamPackage.t;   (* package this layer documents *)
  layer_hash : string;   (* cache-directory name of the layer *)
  prep_path : string;    (* path to the layer's prep/ directory *)
  universe : string;     (* universe hash extracted from the html path *)
  blessed : bool;        (* "blessed" flag read from layer.json's doc entry *)
}
(** Copy odoc support files (CSS, JS, fonts) to the mount point.
    Returns [false] only when the source directory is missing;
    individual copy failures are silently ignored. *)
let copy_support_files ~support_files_dir ~mount_point =
  if not (Sys.file_exists support_files_dir) then begin
    Printf.eprintf "Support files directory not found: %s\n%!" support_files_dir;
    false
  end
  else begin
    Printf.printf "Copying odoc support files from %s...\n%!" support_files_dir;
    (* NOTE(review): paths are spliced into a shell command inside single
       quotes; a quote character in a path would break the command. *)
    let run cmd = ignore (Sys.command cmd) in
    (* Copy each individual support file that exists at the source. *)
    let copy_one file =
      let src = Path.(support_files_dir / file) in
      let dst = Path.(mount_point / file) in
      if Sys.file_exists src then
        run (Printf.sprintf "cp '%s' '%s'" src dst)
    in
    List.iter copy_one [
      "odoc.css";
      "highlight.pack.js";
      "katex.min.css";
      "katex.min.js";
      "odoc_search.js";
    ];
    (* The fonts directory is copied recursively as one unit. *)
    let fonts_src = Path.(support_files_dir / "fonts") in
    let fonts_dst = Path.(mount_point / "fonts") in
    if Sys.file_exists fonts_src then
      run (Printf.sprintf "cp -r '%s' '%s'" fonts_src fonts_dst);
    Printf.printf "Copied support files\n%!";
    true
  end
(** Find sherlodoc.js in any doc-tools layer in the cache: scan
    cache_dir/os_key for directories named "doc-tools-*" and return the
    first one whose fs/home/opam/sherlodoc.js exists. *)
let find_sherlodoc_js ~cache_dir ~os_key =
  let cache_path = Path.(cache_dir / os_key) in
  if not (Sys.file_exists cache_path) then None
  else begin
    (* Strictly longer than the prefix, matching the original check. *)
    let is_doc_tools e =
      String.length e > 10 && String.sub e 0 10 = "doc-tools-"
    in
    let layers =
      List.filter is_doc_tools (Array.to_list (Sys.readdir cache_path))
    in
    let sherlodoc_of dir =
      Path.(cache_path / dir / "fs" / "home" / "opam" / "sherlodoc.js")
    in
    let rec first_existing = function
      | [] -> None
      | dir :: rest ->
        let candidate = sherlodoc_of dir in
        if Sys.file_exists candidate then Some candidate
        else first_existing rest
    in
    first_existing layers
  end
(** Copy sherlodoc.js to the mount point.  Always returns [true]: a
    missing source or a failed copy is reported but not treated as
    fatal (search simply will not work). *)
let copy_sherlodoc_js ~cache_dir ~os_key ~mount_point =
  match find_sherlodoc_js ~cache_dir ~os_key with
  | None ->
    Printf.printf "No sherlodoc.js found in doc-tools layers (search will not work)\n%!";
    true
  | Some src_path ->
    let dst_path = Path.(mount_point / "sherlodoc.js") in
    let status = Sys.command (Printf.sprintf "cp '%s' '%s'" src_path dst_path) in
    if status = 0 then
      Printf.printf "Copied sherlodoc.js from %s\n%!" src_path
    else
      Printf.eprintf "Warning: Failed to copy sherlodoc.js\n%!";
    true
(** Extract universe hash from html_path.
    Path format: .../prep/universes/{universe}/{pkg}/{version}/html
    Returns [None] when no "universes" segment (followed by at least one
    more segment) appears in the path. *)
let extract_universe html_path =
  let rec scan = function
    | [] -> None
    | "universes" :: universe :: _ -> Some universe
    | _ :: rest -> scan rest
  in
  scan (String.split_on_char '/' html_path)
(** Parse layer.json and extract doc info if successful.
    Returns [Some layer] only when the layer's "doc" field exists, its
    status is "success", and the layer has a "prep" directory on disk;
    any parse error or missing field yields [None]. *)
let parse_layer_json ~cache_path ~layer_hash =
  let layer_dir = Path.(cache_path / layer_hash) in
  let layer_json = Path.(layer_dir / "layer.json") in
  try
    let json = Yojson.Safe.from_file layer_json in
    let open Yojson.Safe.Util in
    (* "package" holds an opam package string like "foo.1.0" *)
    let pkg_str = json |> member "package" |> to_string in
    let pkg = OpamPackage.of_string pkg_str in
    match json |> member "doc" with
    | `Null -> None
    | doc ->
      let status = doc |> member "status" |> to_string in
      if status <> "success" then None
      else
        let html_path = doc |> member "html_path" |> to_string in
        let blessed = doc |> member "blessed" |> to_bool in
        (* Universe hash is recovered from the html output path. *)
        let universe = extract_universe html_path |> Option.value ~default:"unknown" in
        let prep_path = Path.(layer_dir / "prep") in
        if Sys.file_exists prep_path then
          Some { pkg; layer_hash; prep_path; universe; blessed }
        else
          None
  (* Catch-all: a corrupt/missing layer.json is treated as "no doc layer". *)
  with _ -> None
(** A cache entry is a doc layer when it is named [doc-{hash}], excluding
    the tool layers [doc-driver-*] and [doc-odoc-*]. *)
let is_doc_layer_dir name =
  (* True when [name] is strictly longer than [p] and starts with [p]. *)
  let prefixed p =
    let plen = String.length p in
    String.length name > plen && String.sub name 0 plen = p
  in
  prefixed "doc-" && not (prefixed "doc-driver-") && not (prefixed "doc-odoc-")
(** Scan cache directory for all doc layers with successful docs. *)
let scan_cache ~cache_dir ~os_key =
  let cache_path = Path.(cache_dir / os_key) in
  if not (Sys.file_exists cache_path) then []
  else
    Sys.readdir cache_path
    |> Array.to_list
    |> List.filter is_doc_layer_dir
    |> List.filter_map (fun layer_hash -> parse_layer_json ~cache_path ~layer_hash)
(** Create the overlay mount.
    Mounts an overlayfs at [mount_point] whose lower layers are the [prep]
    directories of all [layers]. Requires root. Returns [true] on success. *)
let create_overlay_mount ~layers ~mount_point ~work_dir =
  if layers = [] then begin
    Printf.eprintf "No documentation layers found\n%!";
    false
  end
  else begin
    (* Create mount point and work directories *)
    let upper_dir = Path.(work_dir / "upper") in
    let work_subdir = Path.(work_dir / "work") in
    List.iter (fun dir ->
      if not (Sys.file_exists dir) then
        ignore (Sys.command (Printf.sprintf "mkdir -p '%s'" dir)))
      [mount_point; upper_dir; work_subdir];

    (* Build lowerdir string - all prep directories *)
    let lower_dirs = List.map (fun l -> l.prep_path) layers in
    let lowerdir = String.concat ":" lower_dirs in

    (* Mount overlay *)
    (* NOTE(review): lowerdir/upperdir/workdir are interpolated unquoted into
       the mount options; paths containing spaces, colons or commas would
       break the command — confirm cache paths can never contain them. *)
    let mount_cmd = Printf.sprintf
      "mount -t overlay overlay -o lowerdir=%s,upperdir=%s,workdir=%s '%s'"
      lowerdir upper_dir work_subdir mount_point
    in
    Printf.printf "Mounting overlay with %d layers...\n%!" (List.length layers);
    let exit_code = Sys.command mount_cmd in
    if exit_code <> 0 then begin
      Printf.eprintf "Failed to mount overlay (exit code %d)\n%!" exit_code;
      Printf.eprintf "Command: %s\n%!" mount_cmd;
      Printf.eprintf "Note: This requires root privileges. Try running with sudo.\n%!";
      false
    end
    else begin
      Printf.printf "Overlay mounted at %s\n%!" mount_point;
      true
    end
  end
(** Create symlinks for blessed packages at the root level.
    Each blessed layer gets [{name}/{version} -> ../universes/...] so the
    canonical package path resolves to its universe's html output. *)
let create_blessed_symlinks ~layers ~mount_point =
  let blessed = List.filter (fun l -> l.blessed) layers in
  Printf.printf "Creating symlinks for %d blessed packages...\n%!" (List.length blessed);
  let link_one layer =
    let pkg_name = OpamPackage.name_to_string layer.pkg in
    let pkg_version = OpamPackage.version_to_string layer.pkg in
    let link_dir = Path.(mount_point / pkg_name) in
    let link_path = Path.(link_dir / pkg_version) in
    let target =
      Printf.sprintf "../universes/%s/%s/%s/html" layer.universe pkg_name pkg_version
    in
    (* Create parent directory, then the symlink if not already present *)
    if not (Sys.file_exists link_dir) then
      ignore (Sys.command (Printf.sprintf "mkdir -p '%s'" link_dir));
    if not (Sys.file_exists link_path) then begin
      ignore (Sys.command (Printf.sprintf "ln -s '%s' '%s'" target link_path));
      Printf.printf " %s.%s -> %s\n%!" pkg_name pkg_version target
    end
  in
  List.iter link_one blessed
(** Generate index.html for the combined docs.
    Lists only blessed layers, sorted by package; writes the file at the
    mount point root and returns [true] on success. *)
let generate_index ~layers ~mount_point =
  let blessed = List.filter (fun l -> l.blessed) layers in
  let sorted = List.sort (fun a b -> OpamPackage.compare a.pkg b.pkg) blessed in
  let index_content =
    let buf = Buffer.create 4096 in
    Buffer.add_string buf "<!DOCTYPE html>\n<html>\n<head>\n";
    Buffer.add_string buf " <title>OCaml Package Documentation</title>\n";
    Buffer.add_string buf " <style>\n";
    Buffer.add_string buf " body { font-family: sans-serif; max-width: 800px; margin: 0 auto; padding: 20px; }\n";
    Buffer.add_string buf " ul { list-style: none; padding: 0; }\n";
    Buffer.add_string buf " li { padding: 5px 0; }\n";
    Buffer.add_string buf " a { color: #0066cc; text-decoration: none; }\n";
    Buffer.add_string buf " a:hover { text-decoration: underline; }\n";
    Buffer.add_string buf " </style>\n";
    Buffer.add_string buf "</head>\n<body>\n";
    Buffer.add_string buf " <h1>OCaml Package Documentation</h1>\n";
    Buffer.add_string buf (Printf.sprintf " <p>%d packages</p>\n" (List.length sorted));
    Buffer.add_string buf " <ul>\n";
    (* One <li> per blessed package, linking to {name}/{version}/ which is
       the symlink created by [create_blessed_symlinks]. *)
    List.iter (fun layer ->
      let pkg_name = OpamPackage.name_to_string layer.pkg in
      let pkg_version = OpamPackage.version_to_string layer.pkg in
      let href = Printf.sprintf "%s/%s/" pkg_name pkg_version in
      Buffer.add_string buf
        (Printf.sprintf " <li><a href=\"%s\">%s.%s</a></li>\n" href pkg_name pkg_version))
      sorted;
    Buffer.add_string buf " </ul>\n";
    Buffer.add_string buf "</body>\n</html>\n";
    Buffer.contents buf
  in
  let index_path = Path.(mount_point / "index.html") in
  try
    let oc = open_out index_path in
    output_string oc index_content;
    close_out oc;
    Printf.printf "Generated index.html with %d blessed packages\n%!" (List.length sorted);
    true
  with exn ->
    Printf.eprintf "Failed to write index: %s\n%!" (Printexc.to_string exn);
    false
(** Main combine function: scan the cache, mount the overlay, then add
    symlinks, support files, sherlodoc.js and (optionally) an index.
    Returns [false] only when the overlay mount itself fails. *)
let combine ~cache_dir ~os_key ~mount_point ~work_dir ~generate_idx
    ~support_files_dir =
  let layers = scan_cache ~cache_dir ~os_key in
  Printf.printf "Found %d documentation layers\n%!" (List.length layers);
  if not (create_overlay_mount ~layers ~mount_point ~work_dir) then false
  else begin
    create_blessed_symlinks ~layers ~mount_point;
    (* Copy odoc support files if directory is specified *)
    Option.iter
      (fun dir -> ignore (copy_support_files ~support_files_dir:dir ~mount_point))
      support_files_dir;
    (* Copy sherlodoc.js from doc-tools layer if available *)
    ignore (copy_sherlodoc_js ~cache_dir ~os_key ~mount_point);
    if generate_idx then ignore (generate_index ~layers ~mount_point);
    true
  end
(** Unmount the overlay; reports the outcome and returns [true] on success. *)
let unmount ~mount_point =
  let status = Sys.command (Printf.sprintf "umount '%s'" mount_point) in
  (if status = 0 then Printf.printf "Unmounted %s\n%!" mount_point
   else Printf.eprintf "Failed to unmount %s (exit code %d)\n%!" mount_point status);
  status = 0
(* Why a candidate package version was rejected by the solver. *)
type rejection =
  | UserConstraint of OpamFormula.atom  (* excluded by a user-supplied constraint *)
  | Unavailable  (* "available" filter evaluated to false *)

(* Path-concatenation shorthand. *)
let ( / ) = Filename.concat

(* Open [path] as a directory and run [fn] on the handle, closing it even
   when [fn] raises. *)
let with_dir path fn =
  let ch = Unix.opendir path in
  Fun.protect ~finally:(fun () -> Unix.closedir ch) (fun () -> fn ch)
(* List the entries of [path], skipping every name that starts with '.'
   (which also filters "." and ".."). Order follows readdir, reversed. *)
let list_dir path =
  with_dir path (fun handle ->
      let acc = ref [] in
      (try
         while true do
           let entry = Unix.readdir handle in
           if entry.[0] <> '.' then acc := entry :: !acc
         done
       with End_of_file -> ());
      !acc)
(* Solver context: everything needed to enumerate and filter candidate
   packages from one or more opam repository checkouts. *)
type t = {
  env : string -> OpamVariable.variable_contents option;  (* global opam variable lookup *)
  packages_dirs : string list;  (* "packages" dirs of the repositories, searched in order *)
  pins : (OpamPackage.Version.t * OpamFile.OPAM.t) OpamPackage.Name.Map.t;  (* pins override repositories *)
  constraints : OpamFormula.version_constraint OpamTypes.name_map; (* User-provided constraints *)
  test : OpamPackage.Name.Set.t;  (* packages whose {with-test} deps are included *)
  prefer_oldest : bool;  (* pick oldest rather than newest candidate versions *)
  doc : bool; (* Whether to filter in {with-doc} deps *)
  post : bool; (* Whether to filter in {post} deps *)
}
(* Load the opam metadata for [pkg]: a pin takes precedence, otherwise the
   first repository in [t.packages_dirs] containing the opam file wins. *)
let load t pkg =
  let { OpamPackage.name; version = _ } = pkg in
  match OpamPackage.Name.Map.find_opt name t.pins with
  | Some (_, opam) -> opam
  | None ->
    List.find_map
      (fun packages_dir ->
        let opam = packages_dir / OpamPackage.Name.to_string name / OpamPackage.to_string pkg / "opam" in
        if Sys.file_exists opam then Some opam else None)
      t.packages_dirs
    (* NOTE(review): Option.get raises Invalid_argument when the package is
       in no repository — presumably callers only pass versions discovered
       on disk (see [candidates]); confirm. *)
    |> Option.get |> OpamFilename.raw |> OpamFile.make |> OpamFile.OPAM.read

(* User-supplied version constraint for [name], if any. *)
let user_restrictions t name = OpamPackage.Name.Map.find_opt name t.constraints

(* Sentinel version used to flag development packages. *)
let dev = OpamPackage.Version.of_string "dev"
(* Standard environment for evaluating opam filters outside a real switch:
   maps the usual platform variables to the supplied values and returns
   [None] for anything unknown. *)
let std_env ?(ocaml_native = true) ?sys_ocaml_version ?opam_version ~arch ~os ~os_distribution ~os_family ~os_version () = function
  | "arch" -> Some (OpamTypes.S arch)
  | "os" -> Some (OpamTypes.S os)
  | "os-distribution" -> Some (OpamTypes.S os_distribution)
  | "os-version" -> Some (OpamTypes.S os_version)
  | "os-family" -> Some (OpamTypes.S os_family)
  (* Consistency fix: use OpamTypes.S like every other arm (OpamTypes
     re-exports OpamVariable.variable_contents, so this is the same
     constructor as the previous OpamVariable.S). *)
  | "opam-version" -> Some (OpamTypes.S (Option.value ~default:OpamVersion.(to_string current) opam_version))
  | "sys-ocaml-version" -> sys_ocaml_version |> Option.map (fun v -> OpamTypes.S v)
  | "ocaml:native" -> Some (OpamTypes.B ocaml_native)
  | "enable-ocaml-beta-repository" -> None (* Fake variable? *)
  | _ -> None
(* Variable lookup used when evaluating [pkg]'s filters: resolves "version"
   to the package's own version and defers everything else to [t.env].
   Dependency-only variables (build/post/test/...) are left unresolved so
   OpamFilter.filter_deps can handle them. *)
let env t pkg v =
  if List.mem v OpamPackageVar.predefined_depends_variables then None
  else
    match OpamVariable.Full.to_string v with
    | "version" -> Some (OpamTypes.S (OpamPackage.Version.to_string (OpamPackage.version pkg)))
    | x -> t.env x

(* Resolve a dependency formula for [pkg]: partially evaluate filters, then
   keep build deps always, and test/doc/post deps per the context flags.
   [dev] is true only for the sentinel "dev" version. *)
let filter_deps t pkg f =
  let dev = OpamPackage.Version.compare (OpamPackage.version pkg) dev = 0 in
  let test = OpamPackage.Name.Set.mem (OpamPackage.name pkg) t.test in
  f |> OpamFilter.partial_filter_formula (env t pkg) |> OpamFilter.filter_deps ~build:true ~post:t.post ~test ~doc:t.doc ~dev ~dev_setup:false ~default:false
(* Order candidate versions: avoid-version packages sort after normal ones;
   within each class, newest first unless [t.prefer_oldest]. *)
let version_compare t (v1, v1_avoid, _) (v2, v2_avoid, _) =
  if v1_avoid <> v2_avoid then (if v1_avoid then 1 else -1)
  else if t.prefer_oldest then OpamPackage.Version.compare v1 v2
  else OpamPackage.Version.compare v2 v1
(* Enumerate candidate (version, opam-or-rejection) pairs for [name], best
   candidate first. A pin short-circuits repository lookup entirely. *)
let candidates t name =
  match OpamPackage.Name.Map.find_opt name t.pins with
  | Some (version, opam) -> [ (version, Ok opam) ]
  | None ->
    (* Union of version directories across all repositories. *)
    let versions =
      List.concat_map
        (fun packages_dir ->
          try packages_dir / OpamPackage.Name.to_string name |> list_dir with
          | Unix.Unix_error (Unix.ENOENT, _, _) -> [])
        t.packages_dirs
      |> List.sort_uniq compare
    in
    let user_constraints = user_restrictions t name in
    versions
    (* Keep only directories that parse as a package and still contain an
       opam file in some repository. *)
    |> List.filter_map (fun dir ->
        match OpamPackage.of_string_opt dir with
        | Some pkg ->
          List.find_opt (fun packages_dir -> Sys.file_exists (packages_dir / OpamPackage.Name.to_string name / dir / "opam")) t.packages_dirs
          |> Option.map (fun _ -> OpamPackage.version pkg)
        | _ -> None)
    (* Drop versions whose "available" filter evaluates to false; remember
       whether each carries the avoid-version flag. *)
    |> List.filter_map (fun v ->
        let pkg = OpamPackage.create name v in
        let opam = load t pkg in
        let avoid = OpamFile.OPAM.has_flag Pkgflag_AvoidVersion opam in
        let available = OpamFile.OPAM.available opam in
        match OpamFilter.eval_to_bool ~default:false (env t pkg) available with
        | true -> Some (v, avoid, opam)
        | false -> None)
    (* https://github.com/ocaml-opam/opam-0install-cudf/issues/5 cf 4.12.1 *)
    |> (fun l -> if List.for_all (fun (_, avoid, _) -> avoid) l then [] else l)
    |> List.sort (version_compare t)
    (* Surface versions excluded by the user constraint as explicit
       rejections instead of hiding them, so they can be reported. *)
    |> List.map (fun (v, _, opam) ->
        match user_constraints with
        | Some test when not (OpamFormula.check_version_formula (OpamFormula.Atom test) v) -> (v, Error (UserConstraint (name, Some test)))
        | _ -> (v, Ok opam))
(* Pretty-print why a candidate version was rejected. *)
let pp_rejection f = function
  | UserConstraint x -> Fmt.pf f "Rejected by user-specified constraint %s" (OpamFormula.string_of_atom x)
  | Unavailable -> Fmt.string f "Availability condition not satisfied"

(* Build a solver context; see the field comments on [t] for each knob.
   Defaults: newest-first, no test deps, no pins, no doc deps, post deps on. *)
let create ?(prefer_oldest = false) ?(test = OpamPackage.Name.Set.empty) ?(pins = OpamPackage.Name.Map.empty) ?(doc = false) ?(post = true) ~constraints ~env packages_dirs =
  { env; packages_dirs; pins; constraints; test; prefer_oldest; doc; post }

(** Create a new context with different doc/post settings.
    This is used to compute compile vs link deps separately. *)
let with_doc_post ~doc ~post t =
  { t with doc; post }
(** Extract x-extra-doc-deps from an opam file.
    Same implementation as in odoc_gen.ml but needed here to extend packages.
    Handles both simple package names and package names with constraints. *)
let get_extra_doc_deps opamfile =
  let open OpamParserTypes.FullPos in
  match
    OpamStd.String.Map.find_opt "x-extra-doc-deps"
      (OpamFile.OPAM.extensions opamfile)
  with
  | None -> OpamPackage.Name.Set.empty
  | Some value ->
    (* A list item is either "name" or "name" { constraint }. *)
    let name_of_item item =
      match item.pelem with
      | String n -> Some n
      | Option ({ pelem = String n; _ }, _) -> Some n
      | _ -> None
    in
    (match value.pelem with
     | List { pelem = items; _ } ->
       List.fold_left
         (fun acc item ->
            match name_of_item item with
            | Some n -> OpamPackage.Name.Set.add (OpamPackage.Name.of_string n) acc
            | None -> acc)
         OpamPackage.Name.Set.empty items
     | _ -> OpamPackage.Name.Set.empty)
(** Create an extended context where x-extra-doc-deps are added to each package's
    regular depends. This is used for doc link solving - x-extra-doc-deps packages
    need to be in the solution to be available during doc linking.

    The approach:
    1. For each pinned package, read its opam file to get x-extra-doc-deps
    2. Create a new opam file with those deps added to the depends formula
    3. Create a new context with the extended pins *)
let extend_with_extra_doc_deps t =
  (* Add each extra dep as an unconditional dependency of [opam]. *)
  let extend_opam opam =
    let extra_deps = get_extra_doc_deps opam in
    if OpamPackage.Name.Set.is_empty extra_deps then opam
    else
      let extra_atoms =
        OpamPackage.Name.Set.elements extra_deps
        |> List.map (fun dep_name -> OpamFormula.Atom (dep_name, OpamFormula.Empty))
      in
      (* ands builds the conjunction directly, avoiding the And(Empty, _)
         chains the previous fold produced. *)
      let new_depends = OpamFormula.ands (OpamFile.OPAM.depends opam :: extra_atoms) in
      OpamFile.OPAM.with_depends new_depends opam
  in
  (* map (not mapi): the package name is not needed here. *)
  let new_pins =
    OpamPackage.Name.Map.map (fun (version, opam) -> (version, extend_opam opam)) t.pins
  in
  { t with pins = new_pins }
+116
day10/bin/doc_tools.ml
···11+(** Doc tools layer management for odoc toolchain.
22+33+ Split into two layers:
44+ 1. Driver layer (shared): odoc_driver_voodoo, sherlodoc, odoc-md
55+ - Built once with OCaml 5.x
66+ - These tools just need executables, don't need to match target compiler
77+88+ 2. Odoc layer (per OCaml version): odoc
99+ - Must be built with same OCaml version as target packages
1010+ - .cmt/.cmti files have version-specific formats
1111+1212+ All tools are pinned to odoc 3.1 from the configured repo/branch. *)
(** Compute hash for the shared driver layer.
    Only depends on repo/branch since it's always built with a fixed OCaml version. *)
let driver_layer_hash ~(config : Config.t) =
  let components = [ "driver"; config.doc_tools_repo; config.doc_tools_branch ] in
  (* MD5 of the "|"-joined components, rendered as hex. *)
  String.concat "|" components |> Digest.string |> Digest.to_hex

(** Directory name for the driver layer *)
let driver_layer_name ~(config : Config.t) =
  "doc-driver-" ^ driver_layer_hash ~config

(** Full path to the driver layer *)
let driver_layer_path ~(config : Config.t) =
  let os_key = Config.os_key ~config in
  Path.(config.dir / os_key / driver_layer_name ~config)
(** Generate build script for the shared driver layer.
    Builds odoc_driver_voodoo, sherlodoc, and odoc-md with OCaml 5.x.
    Returns one "&&"-chained shell command line. *)
let driver_build_script ~(config : Config.t) =
  let repo = config.doc_tools_repo in
  let branch = config.doc_tools_branch in
  (* Pin a package (no install) to the configured repo/branch. *)
  let pin pkg = Printf.sprintf "opam pin add -yn %s %s#%s" pkg repo branch in
  (* Use a recent OCaml 5.x for building the driver tools *)
  String.concat " && "
    ([ "opam install -y ocaml-base-compiler.5.2.1" ]
     @ List.map pin [ "odoc"; "odoc-parser"; "odoc-md"; "sherlodoc"; "odoc-driver" ]
     @ [
         (* Install the driver tools *)
         "opam install -y odoc-driver odoc-md sherlodoc";
         (* Generate sherlodoc.js for client-side search *)
         "eval $(opam env) && sherlodoc js > /home/opam/sherlodoc.js";
         (* Verify the tools are installed *)
         "which odoc_driver_voodoo && which sherlodoc";
       ])
(** Check if driver layer exists *)
let driver_exists ~(config : Config.t) : bool =
  Sys.file_exists (driver_layer_path ~config)

(** Get the hash/name for the driver layer *)
let get_driver_hash ~(config : Config.t) : string =
  driver_layer_name ~config

(** Check if odoc_driver_voodoo is available in the driver layer *)
let has_odoc_driver_voodoo ~(config : Config.t) : bool =
  (* The layer stores an extracted rootfs under fs/; the binary lives in the
     default opam switch inside it. *)
  let voodoo_path = Path.(driver_layer_path ~config / "fs" / "home" / "opam" / ".opam" / "default" / "bin" / "odoc_driver_voodoo") in
  Sys.file_exists voodoo_path

(** Path to sherlodoc.js within the driver layer *)
let sherlodoc_js_path ~(config : Config.t) =
  Path.(driver_layer_path ~config / "fs" / "home" / "opam" / "sherlodoc.js")
(* --- Per-version odoc layer --- *)

(** Compute hash for the per-version odoc layer.
    Depends on OCaml version and repo/branch for odoc 3.1. *)
let odoc_layer_hash ~(config : Config.t) ~(ocaml_version : OpamPackage.t) =
  let version = OpamPackage.Version.to_string (OpamPackage.version ocaml_version) in
  let components = [ "odoc"; version; config.doc_tools_repo; config.doc_tools_branch ] in
  (* MD5 of the "|"-joined components, rendered as hex. *)
  String.concat "|" components |> Digest.string |> Digest.to_hex

(** Directory name for the odoc layer *)
let odoc_layer_name ~(config : Config.t) ~(ocaml_version : OpamPackage.t) =
  "doc-odoc-" ^ odoc_layer_hash ~config ~ocaml_version

(** Full path to the odoc layer *)
let odoc_layer_path ~(config : Config.t) ~(ocaml_version : OpamPackage.t) =
  let os_key = Config.os_key ~config in
  Path.(config.dir / os_key / odoc_layer_name ~config ~ocaml_version)
(** Generate build script for the per-version odoc layer.
    Builds odoc with the specified OCaml version, pinned to 3.1 from repo/branch.
    Returns one "&&"-chained shell command line. *)
let odoc_build_script ~(config : Config.t) ~(ocaml_version : OpamPackage.t) =
  let repo = config.doc_tools_repo in
  let branch = config.doc_tools_branch in
  let version = OpamPackage.Version.to_string (OpamPackage.version ocaml_version) in
  (* Pin a package (no install) to the configured repo/branch. *)
  let pin pkg = Printf.sprintf "opam pin add -yn %s %s#%s" pkg repo branch in
  String.concat " && "
    [
      Printf.sprintf "opam install -y ocaml-base-compiler.%s" version;
      pin "odoc";
      pin "odoc-parser";
      (* Install odoc *)
      "opam install -y odoc";
      (* Verify odoc is installed and show version *)
      "eval $(opam env) && which odoc && odoc --version";
    ]
(** Check if odoc layer exists for this OCaml version *)
let odoc_exists ~(config : Config.t) ~(ocaml_version : OpamPackage.t) : bool =
  Sys.file_exists (odoc_layer_path ~config ~ocaml_version)

(** Get the hash/name for the odoc layer *)
let get_odoc_hash ~(config : Config.t) ~(ocaml_version : OpamPackage.t) : string =
  odoc_layer_name ~config ~ocaml_version

(** Check if odoc is available in the odoc layer *)
let has_odoc ~(config : Config.t) ~(ocaml_version : OpamPackage.t) : bool =
  (* Look inside the layer's extracted rootfs for the odoc binary in the
     default opam switch. *)
  let odoc_path = Path.(odoc_layer_path ~config ~ocaml_version / "fs" / "home" / "opam" / ".opam" / "default" / "bin" / "odoc") in
  Sys.file_exists odoc_path
+72
day10/bin/docker.ml
···11+open Dockerfile
(* Map a machine architecture string to the Docker --platform value.
   Unknown architectures pass through as "linux/<arch>". *)
let platform arch =
  let table =
    [ ("x86_64", "linux/amd64"); ("amd64", "linux/amd64");
      ("i386", "linux/386"); ("i486", "linux/386");
      ("i586", "linux/386"); ("i686", "linux/386");
      ("aarch64", "linux/arm64");
      ("armv7l", "linux/arm/v7"); ("armv6l", "linux/arm/v6");
      ("ppc64le", "linux/ppc64le"); ("riscv64", "linux/riscv64");
      ("s390x", "linux/s390x") ]
  in
  match List.assoc_opt arch table with
  | Some p -> p
  | None -> "linux/" ^ arch
(* Builder stage that downloads a static opam 2.4.1 binary for the target
   architecture into /usr/local/bin (aliased "opam-builder" for COPY --from). *)
let opam ~(config : Config.t) base_image =
  (* opam release binaries use their own architecture naming scheme. *)
  let opam_arch = match config.arch with
    | "x86_64" | "amd64" -> "x86_64"
    | "aarch64" -> "aarch64"
    | "armv7l" -> "armhf"
    | "i386" | "i486" | "i586" | "i686" -> "i686"
    | arch -> arch
  in
  from ~platform:(platform config.arch) ~alias:"opam-builder" base_image
  @@ run "apt update && apt install -y curl"
  @@ run "curl -fsSL https://github.com/ocaml/opam/releases/download/2.4.1/opam-2.4.1-%s-linux -o /usr/local/bin/opam && chmod +x /usr/local/bin/opam" opam_arch
(* Builder stage that compiles the opam-build tool from its git repository
   (aliased "opam-build-builder"); reuses the opam binary from the previous
   stage and installs the result at /usr/local/bin/opam-build. *)
let opam_build ~(config : Config.t) base_image =
  from ~platform:(platform config.arch) ~alias:"opam-build-builder" base_image
  @@ run "apt update && apt install -y build-essential git curl unzip bubblewrap"
  @@ copy ~from:"opam-builder" ~src:[ "/usr/local/bin/opam" ] ~dst:"/usr/local/bin/opam" ()
  @@ run "opam init --disable-sandboxing -a --bare -y"
  @@ run "git clone --depth 1 --branch master https://github.com/mtelvers/opam-build.git /tmp/opam-build"
  @@ workdir "/tmp/opam-build"
  @@ run "opam switch create . 5.3.0 --deps-only -y"
  @@ run "opam exec -- dune build --release"
  @@ run "install -m 755 _build/default/bin/main.exe /usr/local/bin/opam-build"
(* Build the Debian-based rootfs: docker-build a multi-stage image (opam
   binary + opam-build tool + configured base image with an "opam" user and
   a local repository), then export the container filesystem into
   [temp_dir]/fs. Returns the docker build exit code (0 on success). *)
let debian ~(config : Config.t) ~temp_dir _opam_repository build_log uid gid =
  let base_image = Printf.sprintf "%s:%s" config.os_distribution config.os_version in
  let dockerfile =
    (opam ~config base_image) @@ (opam_build ~config base_image)
    @@ from ~platform:(platform config.arch) base_image
    @@ run "apt update && apt upgrade -y"
    @@ run "apt install build-essential unzip bubblewrap git sudo curl rsync -y"
    @@ copy ~from:"opam-builder" ~src:[ "/usr/local/bin/opam" ] ~dst:"/usr/local/bin/opam" ()
    @@ copy ~from:"opam-build-builder" ~src:[ "/usr/local/bin/opam-build" ] ~dst:"/usr/local/bin/opam-build" ()
    @@ run "echo 'debconf debconf/frontend select Noninteractive' | debconf-set-selections"
    (* Free the host uid if taken, then recreate the opam user with the
       host's uid/gid so exported files keep sane ownership. *)
    @@ run "if getent passwd %i; then userdel -r $(id -nu %i); fi" uid uid
    @@ run "groupadd --gid %i opam" gid
    @@ run "adduser --disabled-password --gecos '@opam' --no-create-home --uid %i --gid %i --home /home/opam opam" uid gid
    @@ run "mkdir -p /home/opam && chown -R %i:%i /home/opam" uid gid
    @@ run "echo 'opam ALL=(ALL:ALL) NOPASSWD:ALL' > /etc/sudoers.d/opam"
    @@ run "chmod 440 /etc/sudoers.d/opam" @@ run "chown root:root /etc/sudoers.d/opam"
    @@ copy ~chown:(string_of_int uid ^ ":" ^ string_of_int gid) ~src:[ "opam-repository" ] ~dst:"/home/opam/opam-repository" ()
    @@ user "%i:%i" uid gid @@ workdir "/home/opam"
    @@ run "opam init -k local -a /home/opam/opam-repository --bare --disable-sandboxing -y"
    @@ run "opam switch create default --empty"
  in
  let dockerfile_path = Path.(temp_dir / "Dockerfile") in
  let () = Os.write_to_file dockerfile_path (Dockerfile.string_of_t dockerfile) in
  let tag = Printf.sprintf "day10-%s:%s" config.os_distribution config.os_version in
  let build_result = Os.exec ~stdout:build_log ~stderr:build_log [ "docker"; "build"; "--network=host"; "-t"; tag; temp_dir ] in
  match build_result with
  | 0 ->
    (* Export the image's filesystem: create a throwaway container, tar its
       contents out into fs/, then remove the container. *)
    let rootfs = Path.(temp_dir / "fs") in
    let container = Filename.basename temp_dir in
    let () = Os.mkdir rootfs in
    let _ = Os.sudo ~stdout:"/dev/null" [ "docker"; "create"; "--name"; container; tag ] in
    let _ = Os.run (String.concat " " [ "sudo"; "docker"; "export"; container; "|"; "sudo"; "tar"; "-xf"; "-"; "-C"; rootfs ]) in
    let _ = Os.sudo ~stdout:"/dev/null" [ "docker"; "rm"; container ] in
    (* Drop opam's cached repo state so the first solve re-reads the repo. *)
    let _ = Os.sudo [ "sh"; "-c"; ("rm -f " ^ Path.(rootfs / "home" / "opam" / ".opam" / "repo" / "state-*.cache")) ] in
    0
  | build_result -> build_result
+14
day10/bin/dot_solution.ml
(* Render a dependency map as a Graphviz digraph. Packages with no
   dependencies are omitted; multi-dependency edges use the {a b c} form. *)
let to_string pkgs =
  let quoted package = "\"" ^ OpamPackage.to_string package ^ "\"" in
  let edge (pkg, deps) =
    match OpamPackage.Set.to_list deps with
    | [] -> None
    | [ p ] -> Some (" " ^ quoted pkg ^ " -> " ^ quoted p ^ ";")
    | lst -> Some (" " ^ quoted pkg ^ " -> {" ^ (lst |> List.map quoted |> String.concat " ") ^ "}")
  in
  let graph =
    OpamPackage.Map.to_list pkgs |> List.filter_map edge |> String.concat "\n"
  in
  "digraph opam {\n" ^ graph ^ "\n}\n"
1313+1414+let save name pkgs = Os.write_to_file name (to_string pkgs)
+40
day10/bin/dummy.ml
(* Dummy (no-op) container backend: records the config and pretends every
   operation succeeds. Useful for dry runs. *)
type t = { config : Config.t }

(* Capture the configuration; nothing else to set up. *)
let init ~(config : Config.t) = { config }

(* Nothing to tear down. *)
let deinit ~t:_ = ()

(* Expose the configuration this backend was created with. *)
let config ~t = t.config
(* Hash a dependency set: digest each dep's effective opam metadata, then
   digest the space-joined hashes. Identical inputs give identical layers. *)
let layer_hash ~t deps =
  let hash_of opam =
    opam
    |> Util.opam_file t.config.opam_repositories
    |> Option.get
    |> OpamFile.OPAM.effective_part
    |> OpamFile.OPAM.write_to_string
    |> OpamHash.compute_from_string
    |> OpamHash.to_string
  in
  deps |> List.map hash_of |> String.concat " " |> Digest.string |> Digest.to_hex
(* No-op: the dummy backend never materialises a root filesystem. *)
let run ~t:_ ~temp_dir:_ _opam_repository _build_log = 0

(* "Build" a package by merely reporting which dependency layers are present
   in the cache; always reports success (0). *)
let build ~t ~temp_dir _build_log _pkg ordered_hashes =
  let config = t.config in
  let () =
    List.iter
      (fun hash ->
        let path = Path.(config.dir / hash) in
        let e = if Sys.file_exists path then "ok" else "not found" in
        Printf.printf "%s: %s\n" path e)
      ordered_hashes
  in
  let _rootfs = Path.(temp_dir / "fs") in
  0

(* Documentation layers are not supported; the empty hash means "none". *)
let doc_layer_hash ~t:_ ~build_hash:_ ~dep_doc_hashes:_ ~ocaml_version:_ ~blessed:_ = ""

(* Documentation generation not supported in dummy container *)
let generate_docs ~t:_ ~build_layer_dir:_ ~doc_layer_dir:_ ~dep_doc_hashes:_ ~pkg:_ ~installed_libs:_ ~installed_docs:_ ~phase:_ ~ocaml_version:_ = None

(* JTW layers are not supported; the empty hash means "none". *)
let jtw_layer_hash ~t:_ ~build_hash:_ ~ocaml_version:_ = ""

(* JTW generation not supported in dummy container *)
let generate_jtw ~t:_ ~build_layer_dir:_ ~jtw_layer_dir:_ ~dep_build_hashes:_ ~pkg:_ ~installed_libs:_ ~ocaml_version:_ = None
(* State for this container backend. *)
type t = {
  config : Config.t;
  uid : int;  (* uid used for the build user inside the container *)
  gid : int;  (* gid used for the build user inside the container *)
}

(* Environment passed to opam invocations: non-interactive, precise
   tracking, full error logs suppressed. *)
let env = [ ("HOME", "/home/opam"); ("OPAMYES", "1"); ("OPAMCONFIRMLEVEL", "unsafe-yes"); ("OPAMERRLOGLEN", "0"); ("OPAMPRECISETRACKING", "1") ]
88+99+let install_script =
1010+ {|#!/bin/sh
1111+#-
1212+# Copyright (c) 2011 Nathan Whitehorn
1313+# Copyright (c) 2013-2015 Devin Teske
1414+# All rights reserved.
1515+#
1616+# Redistribution and use in source and binary forms, with or without
1717+# modification, are permitted provided that the following conditions
1818+# are met:
1919+# 1. Redistributions of source code must retain the above copyright
2020+# notice, this list of conditions and the following disclaimer.
2121+# 2. Redistributions in binary form must reproduce the above copyright
2222+# notice, this list of conditions and the following disclaimer in the
2323+# documentation and/or other materials provided with the distribution.
2424+#
2525+# THIS SOFTWARE IS PROVIDED BY THE AUTHOR AND CONTRIBUTORS ``AS IS'' AND
2626+# ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
2727+# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
2828+# ARE DISCLAIMED. IN NO EVENT SHALL THE AUTHOR OR CONTRIBUTORS BE LIABLE
2929+# FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
3030+# DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS
3131+# OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION)
3232+# HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
3333+# LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY
3434+# OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF
3535+# SUCH DAMAGE.
3636+#
3737+# $FreeBSD$
3838+#
3939+############################################################ INCLUDES
4040+4141+BSDCFG_SHARE="/usr/share/bsdconfig"
4242+. $BSDCFG_SHARE/common.subr || exit 1
4343+4444+############################################################ MAIN
4545+4646+f_dprintf "Began Installation at %s" "$( date )"
4747+f_dprintf "BSDINSTALL_CHROOT %s" "$1"
4848+export BSDINSTALL_CHROOT=$1
4949+5050+error() {
5151+ local msg
5252+ if [ -n "$1" ]; then
5353+ f_dprintf "error %s" "$1"
5454+ fi
5555+ exit
5656+}
5757+5858+5959+rm -rf $BSDINSTALL_TMPETC
6060+mkdir $BSDINSTALL_TMPETC
6161+mkdir -p $1 || error "mkdir failed for $1"
6262+6363+test ! -d $BSDINSTALL_DISTDIR && mkdir -p $BSDINSTALL_DISTDIR
6464+6565+if [ ! -f $BSDINSTALL_DISTDIR/MANIFEST -a -z "$BSDINSTALL_DISTSITE" ]; then
6666+ export BSDINSTALL_DISTSITE="https://download.freebsd.org/ftp/releases/amd64/amd64/14.2-RELEASE"
6767+ fetch -o $BSDINSTALL_DISTDIR/MANIFEST $BSDINSTALL_DISTSITE/MANIFEST || error "Could not download $BSDINSTALL_DISTSITE/MANIFEST"
6868+fi
6969+7070+export DISTRIBUTIONS="base.txz"
7171+7272+FETCH_DISTRIBUTIONS=""
7373+for dist in $DISTRIBUTIONS; do
7474+ if [ ! -f $BSDINSTALL_DISTDIR/$dist ]; then
7575+ FETCH_DISTRIBUTIONS="$FETCH_DISTRIBUTIONS $dist"
7676+ fi
7777+done
7878+FETCH_DISTRIBUTIONS=`echo $FETCH_DISTRIBUTIONS` # Trim white space
7979+8080+if [ -n "$FETCH_DISTRIBUTIONS" -a -z "$BSDINSTALL_DISTSITE" ]; then
8181+ exec 3>&1
8282+ BSDINSTALL_DISTSITE=`bsdinstall mirrorselect 2>&1 1>&3`
8383+ MIRROR_BUTTON=$?
8484+ exec 3>&-
8585+ test $MIRROR_BUTTON -eq 0 || error "No mirror selected"
8686+ export BSDINSTALL_DISTSITE
8787+fi
8888+8989+if [ ! -z "$FETCH_DISTRIBUTIONS" ]; then
9090+ bsdinstall distfetch || error "Failed to fetch distribution"
9191+fi
9292+9393+bsdinstall checksum || error "Distribution checksum failed"
9494+bsdinstall distextract || error "Distribution extract failed"
9595+9696+bsdinstall config || error "Failed to save config"
9797+cp /etc/resolv.conf $1/etc
9898+9999+bsdinstall entropy
100100+101101+f_dprintf "Installation Completed at %s" "$(date)"
102102+exit $SUCCESS
103103+104104+################################################################################
105105+# END
106106+################################################################################|}
(* Build the runner context.  When running as root (uid 0) a fixed
   unprivileged uid/gid of 1000/1000 is substituted so files created in
   the build root are not owned by root; otherwise the caller's own
   uid/gid are kept. *)
let init ~(config : Config.t) =
  let real_uid = Unix.getuid () and real_gid = Unix.getgid () in
  if real_uid = 0 then { config; uid = 1000; gid = 1000 }
  else { config; uid = real_uid; gid = real_gid }

(* Nothing to tear down for this backend. *)
let deinit ~t:_ = ()

(* Accessor for the configuration captured at [init]. *)
let config ~t = t.config
(* Hash the effective opam metadata of [deps] into one hex digest.
   Each dependency's opam file is reduced to its effective part,
   serialised and hashed; the per-package hashes are then combined
   with MD5 so the layer is invalidated when any dependency changes. *)
let layer_hash ~t deps =
  let hash_of_dep opam =
    let file = Option.get (Util.opam_file t.config.opam_repositories opam) in
    let contents = OpamFile.OPAM.write_to_string (OpamFile.OPAM.effective_part file) in
    OpamHash.to_string (OpamHash.compute_from_string contents)
  in
  Digest.to_hex (Digest.string (String.concat " " (List.map hash_of_dep deps)))
(* Construct the argv for a FreeBSD [jail -c] invocation.
   - [mounts] are written as an fstab file under [temp_dir] (creating any
     missing mount-point directories with sudo) and passed via mount.fstab.
   - [env] bindings are prefixed to the command as "env K='V' ...".
     NOTE(review): values are wrapped in single quotes with no escaping —
     a value containing a quote would break the command line; confirm
     callers only pass safe values.
   - [network] true inherits the host stack; false sets up a private vnet
     with only loopback configured.
   - [username] Some u runs the command via "su -l u", otherwise /bin/sh.
   All [argv] commands are joined with " && " so the first failure aborts. *)
let jail ~temp_dir ~rootfs ~mounts ~env ~argv ~network ~username =
  let mounts =
    let fstab = Path.(temp_dir / "fstab") in
    let () =
      (* One fstab line per mount: src dst type ro|rw 0 0. *)
      List.map
        (fun (m : Mount.t) ->
          let full = Path.(temp_dir / m.dst) in
          let () = if not (Sys.file_exists full) then ignore (Os.sudo [ "mkdir"; "-p"; full ]) in
          String.concat " " [ m.src; full; m.ty; (if List.mem "ro" m.options then "ro" else "rw"); "0"; "0" ])
        mounts
      |> String.concat "\n" |> Os.write_to_file fstab
    in
    [ "mount.fstab=" ^ fstab ]
  in
  let env = List.map (fun (k, v) -> k ^ "='" ^ v ^ "'") env in
  let params = String.concat " " [ (if List.is_empty env then "" else String.concat " " ("env" :: env)); String.concat " && " argv ] in
  let network = if network then [ "ip4=inherit"; "ip6=inherit"; "host=inherit" ] else [ "exec.start=/sbin/ifconfig lo0 127.0.0.1/8"; "vnet" ] in
  let cmd = Option.fold ~none:[ "command=/bin/sh" ] ~some:(fun u -> [ "command=/usr/bin/su"; "-l"; u ]) username in
  (* The jail name is derived from the temp dir so concurrent builds don't clash. *)
  [ "jail"; "-c"; "name=" ^ Filename.basename temp_dir; "path=" ^ rootfs; "mount.devfs" ] @ mounts @ network @ cmd @ [ "-c"; params ]
(* Create the FreeBSD base layer under [temp_dir]/fs:
   install the base system with bsdinstall, update it, install opam and
   opam-build binaries, create the opam user, then initialise an opam root
   from [opam_repository] (mounted read-only).  The exit status of the
   last jail run is written to [temp_dir]/status and returned. *)
let run ~t ~temp_dir opam_repository build_log =
  let config = t.config in
  let rootfs = Path.(temp_dir / "fs") in
  let () = Os.mkdir rootfs in
  let script = Path.(temp_dir / "install_script") in
  let () = Os.write_to_file script install_script in
  (* bsdinstall output goes to /dev/null; its debug log (-D) is build_log. *)
  let _ = Os.sudo ~stdout:"/dev/null" [ "bsdinstall"; "-D"; build_log; "script"; script; rootfs ] in
  let _ = Os.sudo [ "chmod"; "777"; build_log ] in
  let _ = Os.sudo ~stdout:build_log [ "freebsd-update"; "-b"; rootfs; "fetch"; "install" ] in
  let _ = Os.sudo ~stdout:build_log [ "pkg"; "--chroot"; rootfs; "install"; "-y"; "pkg" ] in
  let _ = Os.sudo ~stdout:build_log [ "pkg"; "--chroot"; rootfs; "upgrade"; "-y"; "-f" ] in
  (* Download prebuilt opam/opam-build binaries for this architecture. *)
  let opam = Path.(rootfs / "usr" / "bin" / "opam") in
  let _ = Os.sudo [ "curl"; "-L"; "https://github.com/ocaml/opam/releases/download/2.3.0/opam-2.3.0-" ^ config.arch ^ "-freebsd"; "-o"; opam ] in
  (* NOTE(review): the leading "sudo" looks redundant — Os.sudo already
     elevates; confirm and drop if so. *)
  let _ = Os.sudo [ "sudo"; "chmod"; "+x"; opam ] in
  let opam_build = Path.(rootfs / "usr" / "bin" / "opam-build") in
  let _ = Os.sudo [ "curl"; "-L"; "https://github.com/mtelvers/opam-build/releases/download/1.2.0/opam-build-1.2.0-" ^ config.arch ^ "-freebsd"; "-o"; opam_build ] in
  let _ = Os.sudo [ "sudo"; "chmod"; "+x"; opam_build ] in
  (* First jail run (as root): create the opam user matching the host
     uid/gid and install base packages. *)
  let argv =
    [
      "pw groupadd opam -g " ^ string_of_int t.gid;
      "pw useradd -m -n opam -g opam -u " ^ string_of_int t.uid ^ " -h - -c opam";
      "pkg install -y sudo gmake git patch rsync bash zstd pkgconf";
      {|echo "opam ALL=(ALL:ALL) NOPASSWD:ALL" > /usr/local/etc/sudoers.d/opam|};
    ]
  in
  let result = Os.sudo ~stdout:build_log (jail ~temp_dir ~rootfs ~mounts:[] ~env:[] ~argv ~network:true ~username:None) in
  (* devfs is only unmounted on success; a failed jail keeps it for inspection. *)
  let () = if result = 0 then ignore (Os.sudo [ "umount"; Path.(rootfs / "dev") ]) in
  let _ = Os.sudo [ "chflags"; "-R"; "0"; rootfs ] in
  (* Second jail run (as opam): initialise the opam root from the
     read-only mounted repository. *)
  let argv =
    [ "touch /home/opam/.hushlogin"; "opam init -k local -a /home/opam/opam-repository --bare --disable-sandboxing -y"; "opam switch create default --empty" ]
  in
  let mounts = [ { Mount.ty = "nullfs"; src = opam_repository; dst = Path.("fs" / "home" / "opam" / "opam-repository"); options = [ "ro" ] } ] in
  let result = Os.sudo ~stdout:build_log (jail ~temp_dir ~rootfs ~mounts ~env ~argv ~network:true ~username:(Some "opam")) in
  let () =
    if result = 0 then (
      ignore (Os.sudo [ "umount"; Path.(rootfs / "dev") ]);
      ignore (Os.sudo [ "umount"; "-a"; "-f"; "-F"; Path.(temp_dir / "fstab") ]))
  in
  (* Strip volatile opam state so the layer content is reproducible. *)
  let _ = Os.sudo [ "rm"; "-rf"; Path.(rootfs / "home" / "opam" / ".opam" / "default" / ".opam-switch" / "environment") ] in
  let _ = Os.sudo [ "sh"; "-c"; ("rm -f " ^ Path.(rootfs / "home" / "opam" / ".opam" / "repo" / "state-*.cache")) ] in
  let () = Os.write_to_file Path.(temp_dir / "status") (string_of_int result) in
  result
(* Build one package [pkg] in a fresh jail layered on its dependency
   layers ([ordered_hashes], most-specific first, plus the "base" layer):
   hard-link the dependency filesystems into a lowerdir, union-mount an
   empty upperdir on top, run opam-build inside the jail, then strip the
   scratch directories so only the package's own changes remain in
   [temp_dir]/fs.  Returns the jail exit status. *)
let build ~t ~temp_dir build_log pkg ordered_hashes =
  let config = t.config in
  let os_key = Config.os_key ~config in
  let lowerdir = Path.(temp_dir / "lower") in
  let upperdir = Path.(temp_dir / "fs") in
  let workdir = Path.(temp_dir / "work") in
  let () = List.iter Os.mkdir [ lowerdir; upperdir; workdir ] in
  (* When building the user's own package, pin it from the mounted source dir. *)
  let pin = if OpamPackage.name_to_string pkg = config.package then [ "opam pin -yn " ^ OpamPackage.to_string pkg ^ " $HOME/src/"; "cd src" ] else [] in
  let with_test = if config.with_test then "--with-test " else "" in
  let argv = pin @ [ "opam-build -v " ^ with_test ^ OpamPackage.to_string pkg ] in
  let () =
    (* Populate lowerdir from each dependency layer via hard links
       (cp -l); -n keeps the first (most specific) version of a file. *)
    List.iter
      (fun hash ->
        (* no directory target option on FreeBSD cp *)
        let dir = Path.(config.dir / os_key / hash / "fs") in
        let dirs = Sys.readdir dir |> Array.to_list |> List.map (fun d -> Path.(dir / d)) in
        ignore (Os.sudo ([ "cp"; "-n"; "-a"; "-R"; "-l" ] @ dirs @ [ lowerdir ])))
      (ordered_hashes @ [ "base" ])
  in
  let () =
    (* Regenerate the opam switch-state in the upperdir to match the
       packages present in the merged lowerdir. *)
    let packages_dir = Path.(lowerdir / "home" / "opam" / ".opam" / "default" / ".opam-switch" / "packages") in
    let state_file = Path.(upperdir / "home" / "opam" / ".opam" / "default" / ".opam-switch" / "switch-state") in
    if Sys.file_exists packages_dir then Opamh.dump_state packages_dir state_file
  in
  (* lower (ro) + upper (rw unionfs) both mounted at "work"; the opam
     repository is overlaid read-only as the default repo. *)
  let mounts =
    [
      { Mount.ty = "nullfs"; src = lowerdir; dst = "work"; options = [ "ro" ] };
      { Mount.ty = "unionfs"; src = upperdir; dst = "work"; options = [ "rw" ] };
      { ty = "nullfs"; src = Path.(temp_dir / "opam-repository"); dst = Path.("work" / "home" / "opam" / ".opam" / "repo" / "default"); options = [ "ro" ] };
    ]
  in
  let mounts =
    match config.directory with
    | None -> mounts
    | Some src -> mounts @ [ { ty = "nullfs"; src; dst = Path.("work" / "home" / "opam" / "src"); options = [ "rw" ] } ]
  in
  let result = Os.sudo ~stdout:build_log (jail ~temp_dir ~rootfs:workdir ~mounts ~env ~argv ~network:true ~username:(Some "opam")) in
  let () =
    if result = 0 then (
      ignore (Os.sudo [ "umount"; Path.(workdir / "dev") ]);
      ignore (Os.sudo [ "umount"; "-a"; "-f"; "-F"; Path.(temp_dir / "fstab") ]))
  in
  (* Remove scratch dirs and volatile opam state; what's left in upperdir
     is the package's layer.
     NOTE(review): the two "default/.opam-switch" paths below lack the
     ".opam" component present in the sibling entries — confirm whether
     they should be "home/opam/.opam/default/...". *)
  let _ =
    Os.sudo
      [
        "rm";
        "-rf";
        lowerdir;
        workdir;
        Path.(upperdir / "tmp");
        Path.(upperdir / "home" / "opam" / "default" / ".opam-switch" / "sources");
        Path.(upperdir / "home" / "opam" / "default" / ".opam-switch" / "build");
        Path.(upperdir / "home" / "opam" / ".opam" / "default" / ".opam-switch" / "packages" / "cache");
        Path.(upperdir / "home" / "opam" / ".opam" / "default" / ".opam-switch" / "environment");
      ]
  in
  let _ = Os.sudo [ "sh"; "-c"; ("rm -f " ^ Path.(upperdir / "home" / "opam" / ".opam" / "repo" / "state-*.cache")) ] in
  result
(* Docs are not generated on FreeBSD, so the doc layer hash is empty. *)
let doc_layer_hash ~t:_ ~build_hash:_ ~dep_doc_hashes:_ ~ocaml_version:_ ~blessed:_ = ""

(* Documentation generation not supported on FreeBSD *)
let generate_docs ~t:_ ~build_layer_dir:_ ~doc_layer_dir:_ ~dep_doc_hashes:_ ~pkg:_ ~installed_libs:_ ~installed_docs:_ ~phase:_ ~ocaml_version:_ = None

(* JTW layers are not generated on FreeBSD, so the jtw layer hash is empty. *)
let jtw_layer_hash ~t:_ ~build_hash:_ ~ocaml_version:_ = ""

(* JTW generation not supported on FreeBSD *)
let generate_jtw ~t:_ ~build_layer_dir:_ ~jtw_layer_dir:_ ~dep_build_hashes:_ ~pkg:_ ~installed_libs:_ ~ocaml_version:_ = None
+50
day10/bin/json_layers.ml
(* JSON payload of a "parentLayerPaths=..." mount option: the list of
   parent layer directories backing an overlay mount. *)
type parent_layer_paths = { parentLayerPaths : string list [@key "parentLayerPaths"] } [@@deriving yojson]

(* A mount layer after decoding: same fields as [raw_layer] but with the
   option strings parsed into [parent_layer_paths] records. *)
type layer = {
  type_ : string;
  source : string;
  target : string;
  options : parent_layer_paths list;
}

(* Raw JSON shape with capitalised keys (Type/Source/Target/Options) —
   presumably as emitted by the layer tooling; options are still plain
   "key=value" strings here. *)
type raw_layer = {
  raw_type_ : string; [@key "Type"]
  raw_source : string; [@key "Source"]
  raw_target : string; [@key "Target"]
  raw_options : string list; [@key "Options"]
}
[@@deriving yojson]
(* Decode a raw mount option of the form "parentLayerPaths=<json list>"
   into a [parent_layer_paths] record.  Any other option string, or
   malformed JSON, yields [None]. *)
let parse_option_string str =
  let prefix = "parentLayerPaths=" in
  if String.starts_with ~prefix str then
    try
      (* Derive the offset from the prefix itself rather than the magic
         constant 17, so prefix and offset cannot drift apart. *)
      let plen = String.length prefix in
      let json_part = String.sub str plen (String.length str - plen) in
      (* Wrap the bare JSON list in an object so the derived decoder applies. *)
      let full_json = "{\"parentLayerPaths\":" ^ json_part ^ "}" in
      Yojson.Safe.from_string full_json |> parent_layer_paths_of_yojson |> Result.to_option
    with
    | _ -> None
  else None
(* Convert a [raw_layer] into a decoded [layer], parsing each option
   string; unrecognised options are dropped. *)
let layer_of_raw ({ raw_type_; raw_source; raw_target; raw_options } : raw_layer) =
  {
    type_ = raw_type_;
    source = raw_source;
    target = raw_target;
    options = List.filter_map parse_option_string raw_options;
  }
(* Decode a JSON array of raw layers, converting each to a [layer];
   decoding errors are propagated unchanged. *)
let layers_of_yojson json =
  Result.map (List.map layer_of_raw) ([%of_yojson: raw_layer list] json)

(* Parse a JSON string into a layer list. *)
let parse_layers json_string =
  layers_of_yojson (Yojson.Safe.from_string json_string)
(* Read a layers file and return every referenced path: all parent layer
   paths from every layer's options, followed by each layer's source.
   An unreadable/unparsable file yields the empty list. *)
let read_layers path =
  match parse_layers (Os.read_from_file path) with
  | Error _ -> []
  | Ok layers ->
      let parents =
        List.concat_map (fun l -> List.concat_map (fun o -> o.parentLayerPaths) l.options) layers
      in
      parents @ List.map (fun l -> l.source) layers
+14
day10/bin/json_packages.ml
(* On-disk JSON shape: {"packages": ["name.version", ...]}. *)
type package_list = {
  packages: string list;
} [@@deriving yojson]

(* Load a package list from [filename]; fails loudly on a malformed file. *)
let read_packages filename =
  match package_list_of_yojson (Yojson.Safe.from_file filename) with
  | Ok { packages } -> packages
  | Error msg -> failwith (Printf.sprintf "Failed to parse package list from %s: %s" filename msg)

(* Serialise [packages] to [filename] in the same JSON shape. *)
let write_packages filename packages =
  Yojson.Safe.to_file filename (package_list_to_yojson { packages })
+337
day10/bin/jtw_gen.ml
···11+(** JTW generation logic: compile .cma to .cma.js, extract .cmi, META,
22+ generate dynamic_cmis.json, assemble universe output directories. *)
(** Compute hash for a jtw layer.
    Depends on the build hash and jtw-tools layer hash. *)
let compute_jtw_layer_hash ~build_hash ~jtw_tools_hash =
  Digest.to_hex (Digest.string (String.concat " " [ build_hash; jtw_tools_hash ]))
(** Generate the dynamic_cmis.json content for a directory of .cmi files.
    [dcs_url] is the URL path prefix for the directory.
    Returns the JSON string.

    Changes vs. the previous version: the dead [String.split_on_char]
    match (split_on_char never returns []) is removed, and the per-element
    [Str] regexp searches are replaced by a plain substring scan — same
    result (first occurrence of "__"), no Str dependency, no repeated
    regexp compilation. *)
let generate_dynamic_cmis_json ~dcs_url cmi_filenames =
  (* Position of the first "__" in [s], if any. *)
  let double_underscore s =
    let n = String.length s in
    let rec scan i =
      if i + 1 >= n then None
      else if s.[i] = '_' && s.[i + 1] = '_' then Some i
      else scan (i + 1)
    in
    scan 0
  in
  (* Strip the .cmi extension *)
  let all_cmis =
    List.map
      (fun s -> if Filename.check_suffix s ".cmi" then Filename.chop_suffix s ".cmi" else s)
      cmi_filenames
  in
  (* Partition into hidden (name contains "__") and non-hidden modules *)
  let hidden, non_hidden = List.partition (fun x -> double_underscore x <> None) all_cmis in
  (* A hidden module Foo__Bar contributes the prefix "Foo__" (de-duplicated). *)
  let prefixes =
    List.filter_map
      (fun x -> Option.map (fun pos -> String.sub x 0 (pos + 2)) (double_underscore x))
      hidden
    |> List.sort_uniq String.compare
  in
  let toplevel_modules = List.sort String.compare (List.map String.capitalize_ascii non_hidden) in
  (* Build JSON manually to avoid dependency on rpclib *)
  let json_list xs = "[" ^ String.concat "," (List.map (fun s -> Printf.sprintf "%S" s) xs) ^ "]" in
  Printf.sprintf {|{"dcs_url":%S,"dcs_toplevel_modules":%s,"dcs_file_prefixes":%s}|}
    dcs_url (json_list toplevel_modules) (json_list prefixes)
(** Generate findlib_index JSON for a universe.
    [meta_paths] is a list of relative META paths (e.g., "../../p/hmap/0.8.1/<hash>/lib/hmap/META").
    [compiler] is a JSON object with compiler info (version, content_hash). *)
let generate_findlib_index ~compiler meta_paths =
  let metas = `List (List.map (fun p -> `String p) meta_paths) in
  Yojson.Safe.to_string (`Assoc [ ("compiler", compiler); ("metas", metas) ])
(** Recursively collect files matching a predicate, sorted by relative path.
    [pred] receives the path *relative* to [base] (using "/" separators).
    Directory entries are visited in sorted order, so the result is in
    lexicographic order of relative path.
    NOTE(review): if [base] itself is not an existing directory, the walk
    degenerates to testing [pred ""] — confirm callers never rely on that. *)
let collect_files_sorted ~base ~pred =
  let files = ref [] in
  let rec walk rel =
    let full = if rel = "" then base else Path.(base / rel) in
    if Sys.file_exists full && Sys.is_directory full then begin
      (* readdir may fail (permissions, races); treat as an empty directory *)
      let entries = try Sys.readdir full |> Array.to_list with _ -> [] in
      let entries = List.sort String.compare entries in
      List.iter (fun name ->
        let sub = if rel = "" then name else rel ^ "/" ^ name in
        walk sub
      ) entries
    end else if pred rel then
      files := rel :: !files
  in
  walk "";
  (* Accumulated in reverse; restore traversal order. *)
  List.rev !files
(** Compute content hash from payload files in a directory.
    Hashes .cmi, .cma.js, and META files (sorted by relative path).
    Returns first 16 hex chars of MD5. *)
let compute_content_hash lib_dir =
  (* Only files that affect downstream clients count as payload. *)
  let is_payload f =
    Filename.check_suffix f ".cmi"
    || Filename.check_suffix f ".cma.js"
    || Filename.basename f = "META"
  in
  let files = collect_files_sorted ~base:lib_dir ~pred:is_payload in
  let buf = Buffer.create 4096 in
  List.iter (fun rel ->
    (* NUL separators between path and content keep field boundaries
       unambiguous in the hashed stream. *)
    Buffer.add_string buf rel;
    Buffer.add_char buf '\000';
    let content = Os.read_from_file Path.(lib_dir / rel) in
    Buffer.add_string buf content;
    Buffer.add_char buf '\000';
  ) files;
  let hash = Digest.to_hex (Digest.string (Buffer.contents buf)) in
  (* Truncated digest keeps URLs short while remaining collision-resistant
     enough for cache keying. *)
  String.sub hash 0 16
(** Compute content hash for the compiler (worker.js + stdlib .cmi files).
    Returns first 16 hex chars of MD5.
    Missing worker.js or lib/ simply contribute nothing to the hash. *)
let compute_compiler_content_hash tools_output_dir =
  let buf = Buffer.create 4096 in
  (* Hash worker.js *)
  let worker_path = Path.(tools_output_dir / "worker.js") in
  if Sys.file_exists worker_path then begin
    Buffer.add_string buf "worker.js";
    Buffer.add_char buf '\000';
    Buffer.add_string buf (Os.read_from_file worker_path);
    Buffer.add_char buf '\000'
  end;
  (* Hash stdlib .cmi files *)
  let lib_dir = Path.(tools_output_dir / "lib") in
  if Sys.file_exists lib_dir then begin
    let is_cmi f = Filename.check_suffix f ".cmi" in
    let files = collect_files_sorted ~base:lib_dir ~pred:is_cmi in
    List.iter (fun rel ->
      (* Prefix with "lib/" so these entries cannot collide with worker.js. *)
      Buffer.add_string buf ("lib/" ^ rel);
      Buffer.add_char buf '\000';
      Buffer.add_string buf (Os.read_from_file Path.(lib_dir / rel));
      Buffer.add_char buf '\000';
    ) files
  end;
  let hash = Digest.to_hex (Digest.string (Buffer.contents buf)) in
  String.sub hash 0 16
(** The shell command to compile a .cma to .cma.js inside a container.
    All paths are shell-quoted.  Returns a command string suitable for
    bash -c. *)
let jsoo_compile_command ~cma_path ~output_path ~js_stubs =
  let q = Filename.quote in
  let stub_args = String.concat " " (List.map q js_stubs) in
  Printf.sprintf "js_of_ocaml compile --toplevel --include-runtime --effects=disabled %s %s -o %s"
    stub_args (q cma_path) (q output_path)
(** Build the shell script to run inside the container for jtw generation.
    This compiles all .cma files found in the package's lib directory.
    Commands are joined with " && ", so the first failing compile aborts
    the whole script. *)
let jtw_container_script ~pkg ~installed_libs =
  let pkg_name = OpamPackage.name_to_string pkg in
  let lib_base = "/home/opam/.opam/default/lib" in
  (* Find .cma files from installed_libs *)
  let cma_files = List.filter (fun f -> Filename.check_suffix f ".cma") installed_libs in
  if cma_files = [] then
    (* No .cma files - just exit success, we'll still copy .cmi and META *)
    "true"
  else begin
    let compile_cmds = List.map (fun cma_rel ->
      let cma_path = lib_base ^ "/" ^ cma_rel in
      (* Mirror the library's relative layout under jtw-output/lib/. *)
      let js_output = "/home/opam/jtw-output/lib/" ^ cma_rel ^ ".js" in
      let js_dir = Filename.dirname js_output in
      (* Look for jsoo runtime stubs in the same directory as the .cma;
         stubs are discovered at container run time via find, excluding
         previously generated *.cma.js outputs. *)
      let cma_dir = Filename.dirname cma_path in
      Printf.sprintf "mkdir -p %s && js_stubs=$(find %s -name '*.js' -not -name '*.cma.js' 2>/dev/null | sort | tr '\\n' ' ') && js_of_ocaml compile --toplevel --include-runtime --effects=disabled $js_stubs %s -o %s"
        (Filename.quote js_dir) (Filename.quote cma_dir) (Filename.quote cma_path) (Filename.quote js_output)
    ) cma_files in
    let script = String.concat " && " (
      ["eval $(opam env)";
       Printf.sprintf "echo 'JTW: Compiling %s (%d archives)'" pkg_name (List.length cma_files)]
      @ compile_cmds
      @ ["echo 'JTW: Done'"]
    ) in
    script
  end
(** Assemble the jtw output directory structure from completed jtw layers.

    Output structure (content-hashed paths for immutable caching):
    {v
      <jtw_output>/
        compiler/<ocaml-version>/<compiler-hash>/
          worker.js
          lib/ocaml/
            dynamic_cmis.json
            stdlib.cmi, ...
        p/<package>/<version>/<content-hash>/
          lib/<findlib-name>/
            META, dynamic_cmis.json, *.cmi, *.cma.js
        u/<universe-hash>/
          findlib_index (JSON: compiler info + META paths to ../../p/...)
    v}

    The findlib_index is the single entry point for clients. It contains:
    - compiler.version and compiler.content_hash (for constructing worker URL)
    - metas: list of relative META file paths (pointing into p/) *)
let assemble_jtw_output ~config ~jtw_output ~ocaml_version ~solutions ~blessed_maps:_ =
  let os_key = Config.os_key ~config in
  let ocaml_ver = OpamPackage.Version.to_string (OpamPackage.version ocaml_version) in

  (* Step 1: Compute compiler content hash, copy to compiler/<ver>/<whash>/ *)
  let jtw_tools_dir = Jtw_tools.layer_path ~config ~ocaml_version in
  let tools_output = Path.(jtw_tools_dir / "fs" / "home" / "opam" / "jtw-tools-output") in
  let compiler_hash = compute_compiler_content_hash tools_output in
  let compiler_dir = Path.(jtw_output / "compiler" / ocaml_ver / compiler_hash) in
  Os.mkdir ~parents:true compiler_dir;
  let worker_src = Path.(tools_output / "worker.js") in
  if Sys.file_exists worker_src then
    Os.cp worker_src Path.(compiler_dir / "worker.js");
  (* Copy stdlib lib directory from jtw-tools output *)
  let stdlib_src = Path.(tools_output / "lib") in
  if Sys.file_exists stdlib_src then begin
    let stdlib_dst = Path.(compiler_dir / "lib") in
    Os.mkdir ~parents:true stdlib_dst;
    (* --no-target-directory: copy the directory itself onto stdlib_dst *)
    ignore (Os.sudo ["cp"; "-a"; "--no-target-directory"; stdlib_src; stdlib_dst])
  end;

  (* Step 2: For each solution, assemble universe directories *)
  List.iter (fun (_target_pkg, solution) ->
    let ordered = List.map fst (OpamPackage.Map.bindings solution) in
    (* Compute universe hash from build hashes of all packages in solution.
       A package's build hash is the name of its "build-*" layer directory,
       when one exists; packages without one are skipped. *)
    let build_hashes = List.filter_map (fun pkg ->
      let pkg_str = OpamPackage.to_string pkg in
      let pkg_dir = Path.(config.dir / os_key / "packages" / pkg_str) in
      if Sys.file_exists pkg_dir then begin
        try
          Sys.readdir pkg_dir |> Array.to_list
          |> List.find_opt (fun name -> String.length name > 6 && String.sub name 0 6 = "build-")
        with _ -> None
      end else None
    ) ordered in
    let universe = Odoc_gen.compute_universe_hash build_hashes in

    (* Collect META paths for findlib_index *)
    let meta_paths = ref [] in

    List.iter (fun pkg ->
      let pkg_name = OpamPackage.name_to_string pkg in
      let pkg_version = OpamPackage.version_to_string pkg in
      let pkg_str = OpamPackage.to_string pkg in

      (* Find jtw layer for this package *)
      let pkg_layers_dir = Path.(config.dir / os_key / "packages" / pkg_str) in
      let jtw_layer_name =
        if Sys.file_exists pkg_layers_dir then
          try
            Sys.readdir pkg_layers_dir |> Array.to_list
            |> List.find_opt (fun name -> String.length name > 4 && String.sub name 0 4 = "jtw-")
          with _ -> None
        else None
      in

      match jtw_layer_name with
      | None -> ()
      | Some jtw_name ->
        let jtw_layer_dir = Path.(config.dir / os_key / jtw_name) in
        let jtw_lib_src = Path.(jtw_layer_dir / "lib") in
        if Sys.file_exists jtw_lib_src then begin
          (* Compute content hash from payload files in the jtw layer *)
          let content_hash = compute_content_hash jtw_lib_src in

          (* Copy to content-hashed path: p/<pkg>/<ver>/<hash>/lib/ ;
             an existing destination is assumed identical (content-hashed)
             and skipped. *)
          let p_pkg_dir = Path.(jtw_output / "p" / pkg_name / pkg_version / content_hash) in
          let p_lib_dst = Path.(p_pkg_dir / "lib") in
          if not (Sys.file_exists p_lib_dst) then begin
            Os.mkdir ~parents:true p_lib_dst;
            ignore (Os.sudo ["cp"; "-a"; "--no-target-directory"; jtw_lib_src; p_lib_dst])
          end;

          (* Rewrite dynamic_cmis.json files with dcs_url relative to compiler/<ver>/<whash>/ *)
          (* The worker resolves dcs_url relative to its base URL (compiler/<ver>/<whash>/).
             We need ../../../p/<pkg>/<ver>/<chash>/lib/<rel> to navigate there.
             3 levels up: <whash> -> <ver> -> compiler -> root, then into p/... *)
          let rec rewrite_dcs_urls base rel =
            let full = if rel = "" then base else Path.(base / rel) in
            if Sys.file_exists full && Sys.is_directory full then begin
              let entries = try Sys.readdir full |> Array.to_list with _ -> [] in
              let entries = List.sort String.compare entries in
              (* Only directories that actually contain .cmi files get a
                 (re)generated dynamic_cmis.json. *)
              let cmi_files = List.filter (fun f -> Filename.check_suffix f ".cmi") entries in
              if cmi_files <> [] then begin
                let cmi_files = List.sort String.compare cmi_files in
                let new_dcs_url = Printf.sprintf "../../../p/%s/%s/%s/lib/%s"
                  pkg_name pkg_version content_hash (if rel = "" then "" else rel) in
                let dcs_json = generate_dynamic_cmis_json ~dcs_url:new_dcs_url cmi_files in
                Os.write_to_file Path.(full / "dynamic_cmis.json") dcs_json
              end;
              List.iter (fun name ->
                let sub = if rel = "" then name else rel ^ "/" ^ name in
                let sub_full = Path.(base / sub) in
                if Sys.file_exists sub_full && Sys.is_directory sub_full then
                  rewrite_dcs_urls base sub
              ) entries
            end
          in
          rewrite_dcs_urls p_lib_dst "";

          (* Collect META paths pointing to content-hashed p/ paths;
             any filesystem error during the walk is deliberately ignored. *)
          (try
            let rec find_metas base rel =
              let full = Path.(base / rel) in
              if Sys.is_directory full then begin
                let entries = Sys.readdir full |> Array.to_list
                  |> List.sort String.compare in
                List.iter (fun name ->
                  find_metas base (if rel = "" then name else rel ^ "/" ^ name)
                ) entries
              end else if Filename.basename rel = "META" then
                (* Path from u/<universe>/ to p/<pkg>/<ver>/<hash>/lib/<fl>/META *)
                meta_paths :=
                  ("../../p/" ^ pkg_name ^ "/" ^ pkg_version ^ "/" ^ content_hash ^
                   "/lib/" ^ rel) :: !meta_paths
            in
            find_metas jtw_lib_src ""
          with _ -> ());

        end
    ) ordered;

    (* Write findlib_index for this universe *)
    let sorted_metas = List.sort String.compare !meta_paths in
    if sorted_metas <> [] then begin
      let u_dir = Path.(jtw_output / "u" / universe) in
      Os.mkdir ~parents:true u_dir;
      let compiler_json = `Assoc [
        ("version", `String ocaml_ver);
        ("content_hash", `String compiler_hash);
      ] in
      let findlib_index = generate_findlib_index ~compiler:compiler_json sorted_metas in
      Os.write_to_file Path.(u_dir / "findlib_index") findlib_index
    end
  ) solutions
(** Save jtw layer info to layer.json.
    Always records the package, its build hash and a creation timestamp;
    when [?jtw_result] is given, it is appended under the "jtw" key. *)
let save_jtw_layer_info ?jtw_result layer_json_path pkg ~build_hash =
  let base =
    [
      ("package", `String (OpamPackage.to_string pkg));
      ("build_hash", `String build_hash);
      ("created", `Float (Unix.time ()));
    ]
  in
  let fields =
    match jtw_result with
    | Some result -> base @ [ ("jtw", result) ]
    | None -> base
  in
  Yojson.Safe.to_file layer_json_path (`Assoc fields)
(** JTW result types: outcome of a jtw generation attempt. *)
type jtw_result =
  | Jtw_success
  | Jtw_failure of string
  | Jtw_skipped

(* Encode a result as a JSON object with a "status" field; failures also
   carry the error message under "error". *)
let jtw_result_to_yojson r =
  let status s = ("status", `String s) in
  match r with
  | Jtw_success -> `Assoc [ status "success" ]
  | Jtw_failure msg -> `Assoc [ status "failure"; ("error", `String msg) ]
  | Jtw_skipped -> `Assoc [ status "skipped" ]
+65
day10/bin/jtw_tools.ml
···11+(** JTW tools layer management for js_of_ocaml + js_top_worker toolchain.
22+33+ Per OCaml version: installs js_of_ocaml and js_top_worker (pinned from
44+ a git repo), builds worker.js, and extracts stdlib cmis + dynamic_cmis.json.
55+66+ Cached as jtw-tools-{hash}/ per OCaml version + repo + branch. *)
(** Compute hash for the jtw-tools layer.
    Depends on OCaml version, repo, and branch. *)
let layer_hash ~(config : Config.t) ~(ocaml_version : OpamPackage.t) =
  let version = OpamPackage.Version.to_string (OpamPackage.version ocaml_version) in
  [ "jtw-tools"; version; config.jtw_tools_repo; config.jtw_tools_branch ]
  |> String.concat "|" |> Digest.string |> Digest.to_hex

(** Directory name for the jtw-tools layer *)
let layer_name ~(config : Config.t) ~(ocaml_version : OpamPackage.t) =
  "jtw-tools-" ^ layer_hash ~config ~ocaml_version

(** Full path to the jtw-tools layer *)
let layer_path ~(config : Config.t) ~(ocaml_version : OpamPackage.t) =
  Path.(config.dir / Config.os_key ~config / layer_name ~config ~ocaml_version)
(** Generate build script for the jtw-tools layer.
    Pins js_top_worker packages from the configured repo/branch,
    installs js_of_ocaml and js_top_worker-bin, then builds worker.js
    and extracts stdlib cmis.
    The steps are joined with " && " so the first failure aborts. *)
let build_script ~(config : Config.t) ~(ocaml_version : OpamPackage.t) =
  let version = OpamPackage.Version.to_string (OpamPackage.version ocaml_version) in
  let repo = config.jtw_tools_repo in
  let branch = config.jtw_tools_branch in
  String.concat " && "
    [
      Printf.sprintf "opam install -y ocaml-base-compiler.%s" version;
      (* Pin all js_top_worker packages from the repo; "repo#branch" is
         opam's git-pin syntax, -n defers the actual build to the install
         step below *)
      Printf.sprintf "opam pin add -yn js_top_worker %s#%s" repo branch;
      Printf.sprintf "opam pin add -yn js_top_worker-rpc %s#%s" repo branch;
      Printf.sprintf "opam pin add -yn js_top_worker-bin %s#%s" repo branch;
      Printf.sprintf "opam pin add -yn js_top_worker-web %s#%s" repo branch;
      Printf.sprintf "opam pin add -yn js_top_worker_rpc_def %s#%s" repo branch;
      (* Install js_of_ocaml, jtw CLI, and web worker library *)
      "opam install -y js_of_ocaml js_top_worker-bin js_top_worker-web";
      (* Verify tools are installed *)
      "eval $(opam env) && which js_of_ocaml && which jtw";
      (* Build worker.js + stdlib cmis/dynamic_cmis.json in one step *)
      "eval $(opam env) && jtw opam -o /home/opam/jtw-tools-output stdlib";
    ]
(** Check if jtw-tools layer exists for this OCaml version *)
let exists ~(config : Config.t) ~(ocaml_version : OpamPackage.t) : bool =
  Sys.file_exists (layer_path ~config ~ocaml_version)

(** Get the hash/name for the jtw-tools layer *)
let get_hash ~(config : Config.t) ~(ocaml_version : OpamPackage.t) : string =
  layer_name ~config ~ocaml_version

(** Check if js_of_ocaml is available in the jtw-tools layer *)
let has_jsoo ~(config : Config.t) ~(ocaml_version : OpamPackage.t) : bool =
  Sys.file_exists
    Path.(layer_path ~config ~ocaml_version / "fs" / "home" / "opam" / ".opam" / "default" / "bin" / "js_of_ocaml")

(** Check if worker.js was built in the jtw-tools layer *)
let has_worker_js ~(config : Config.t) ~(ocaml_version : OpamPackage.t) : bool =
  Sys.file_exists
    Path.(layer_path ~config ~ocaml_version / "fs" / "home" / "opam" / "jtw-tools-output" / "worker.js")
+977
day10/bin/linux.ml
(* Runner state for the Linux backend: the loaded configuration plus the
   uid/gid that container processes are run as. *)
type t = {
  config : Config.t;
  uid : int;
  gid : int;
}

(* Hostname assigned to each container. *)
let hostname = "builder"

(* Environment passed to every container process.  The OPAM* variables
   make opam fully non-interactive (OPAMYES, OPAMCONFIRMLEVEL) and keep
   error logs untruncated (OPAMERRLOGLEN=0); see the opam manual for
   their exact semantics. *)
let env =
  [
    ("PATH", "/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin");
    ("HOME", "/home/opam");
    ("OPAMYES", "1");
    ("OPAMCONFIRMLEVEL", "unsafe-yes");
    ("OPAMERRLOGLEN", "0");
    ("OPAMPRECISETRACKING", "1");
  ]
(* This is a subset of the capabilities that Docker uses by default.
   These control what root can do in the container.
   If the init process is non-root, permitted, effective and ambient sets are cleared.
   See capabilities(7) for full details.
   NOTE(review): relative to Docker's documented default set, CAP_NET_RAW
   and CAP_NET_BIND_SERVICE appear to be omitted here — confirm that no
   build step needs raw sockets or privileged ports. *)
let default_linux_caps =
  [
    (* Make arbitrary changes to file UIDs and GIDs *)
    "CAP_CHOWN";
    (* Bypass file read, write, and execute permission checks. *)
    "CAP_DAC_OVERRIDE";
    (* Set SUID/SGID bits. *)
    "CAP_FSETID";
    (* Bypass permission checks. *)
    "CAP_FOWNER";
    (* Create special files using mknod. *)
    "CAP_MKNOD";
    (* Make arbitrary manipulations of process GIDs. *)
    "CAP_SETGID";
    (* Make arbitrary manipulations of process UIDs. *)
    "CAP_SETUID";
    (* Set arbitrary capabilities on a file. *)
    "CAP_SETFCAP";
    (* Add any capability from bounding set to inheritable set. *)
    "CAP_SETPCAP";
    (* Use chroot. *)
    "CAP_SYS_CHROOT";
    (* Bypass permission checks for sending signals. *)
    "CAP_KILL";
    (* Write records to kernel auditing log. *)
    "CAP_AUDIT_WRITE";
  ]
5050+5151+let strings xs = `List (List.map (fun x -> `String x) xs)
(** Build an OCI runtime (runc) [config.json] as JSON.
    [root] is the host path of the container rootfs; [argv] the process to
    run; [mounts] extra bind mounts prepended to the standard /proc, /tmp,
    /dev, /sys, etc. mounts; [network] selects whether the container shares
    the host network (and gets /etc/resolv.conf bind-mounted) or is placed
    in its own empty network namespace. *)
let make ~root ~cwd ~argv ~hostname ~uid ~gid ~env ~mounts ~network : Yojson.Safe.t =
  `Assoc
    [
      ("ociVersion", `String "1.0.1-dev");
      ( "process",
        `Assoc
          [
            ("terminal", `Bool false);
            ("user", `Assoc [ ("uid", `Int uid); ("gid", `Int gid) ]);
            ("args", strings argv);
            (* env is passed as "KEY=VALUE" strings per the OCI spec *)
            ("env", strings (List.map (fun (k, v) -> Printf.sprintf "%s=%s" k v) env));
            ("cwd", `String cwd);
            ( "capabilities",
              `Assoc
                [
                  (* Limits capabilities gained on execve. *)
                  ("bounding", strings default_linux_caps);
                  (* Checked by kernel to decide access *)
                  ("effective", strings default_linux_caps);
                  (* Preserved across an execve (if root, or cap in ambient set) *)
                  ("inheritable", strings default_linux_caps);
                  (* Limiting superset for the effective capabilities *)
                  ("permitted", strings default_linux_caps);
                ] );
            ("rlimits", `List [ `Assoc [ ("type", `String "RLIMIT_NOFILE"); ("hard", `Int 1024); ("soft", `Int 1024) ] ]);
            ("noNewPrivileges", `Bool false);
          ] );
      ("root", `Assoc [ ("path", `String root); ("readonly", `Bool false) ]);
      ("hostname", `String hostname);
      ( "mounts",
        `List
          (Mount.user_mounts mounts
          @ [
              Mount.make "/proc" ~options:[ (* TODO: copy to others? *) "nosuid"; "noexec"; "nodev" ] ~ty:"proc" ~src:"proc";
              Mount.make "/tmp" ~ty:"tmpfs" ~src:"tmpfs" ~options:[ "nosuid"; "noatime"; "nodev"; "noexec"; "mode=1777" ];
              Mount.make "/dev" ~ty:"tmpfs" ~src:"tmpfs" ~options:[ "nosuid"; "strictatime"; "mode=755"; "size=65536k" ];
              Mount.make "/dev/pts" ~ty:"devpts" ~src:"devpts" ~options:[ "nosuid"; "noexec"; "newinstance"; "ptmxmode=0666"; "mode=0620"; "gid=5" (* tty *) ];
              Mount.make "/sys" (* This is how Docker does it. runc's default is a bit different. *) ~ty:"sysfs" ~src:"sysfs"
                ~options:[ "nosuid"; "noexec"; "nodev"; "ro" ];
              Mount.make "/sys/fs/cgroup" ~ty:"cgroup" ~src:"cgroup" ~options:[ "ro"; "nosuid"; "noexec"; "nodev" ];
              Mount.make "/dev/shm" ~ty:"tmpfs" ~src:"shm" ~options:[ "nosuid"; "noexec"; "nodev"; "mode=1777"; "size=65536k" ];
              Mount.make "/dev/mqueue" ~ty:"mqueue" ~src:"mqueue" ~options:[ "nosuid"; "noexec"; "nodev" ];
            ]
          @ if network then [ Mount.make "/etc/resolv.conf" ~ty:"bind" ~src:"/etc/resolv.conf" ~options:[ "ro"; "rbind"; "rprivate" ] ] else []) );
      ( "linux",
        `Assoc
          [
            ( "namespaces",
              (* Always unshare pid/ipc/uts/mount; unshare network too only
                 when the container must be offline. *)
              `List
                (List.map
                   (fun namespace -> `Assoc [ ("type", `String namespace) ])
                   ((if network then [] else [ "network" ]) @ [ "pid"; "ipc"; "uts"; "mount" ])) );
            ( "maskedPaths",
              strings
                [
                  "/proc/acpi";
                  "/proc/asound";
                  "/proc/kcore";
                  "/proc/keys";
                  "/proc/latency_stats";
                  "/proc/timer_list";
                  "/proc/timer_stats";
                  "/proc/sched_debug";
                  "/sys/firmware";
                  "/proc/scsi";
                ] );
            ("readonlyPaths", strings [ "/proc/bus"; "/proc/fs"; "/proc/irq"; "/proc/sys"; "/proc/sysrq-trigger" ]);
            ( "seccomp",
              `Assoc
                ([
                   ("defaultAction", `String "SCMP_ACT_ALLOW");
                   ( "syscalls",
                     `List
                       [
                         `Assoc
                           [
                             (* Sync calls are pointless for the builder, because if the computer crashes then we'll
                                just throw the build dir away and start again. And btrfs sync is really slow.
                                Based on https://bblank.thinkmo.de/using-seccomp-to-filter-sync-operations.html
                                Note: requires runc >= v1.0.0-rc92. *)
                             ("names", strings [ "fsync"; "fdatasync"; "msync"; "sync"; "syncfs"; "sync_file_range" ]);
                             ("action", `String "SCMP_ACT_ERRNO");
                             ("errnoRet", `Int 0);
                             (* Return error "success" *)
                           ];
                       ] );
                 ]
                @ [ ("architectures", strings [ "SCMP_ARCH_X86_64"; "SCMP_ARCH_X86"; "SCMP_ARCH_X32" ]) ]) );
          ] );
    ]
(* Create the Linux backend context. If running as a SUID/SGID binary
   (effective id is root but real id is not), promote the real ids to
   root as well, since the SUID bit is not inherited by children. *)
let init ~(config : Config.t) =
  let promote ~real ~effective ~set = if effective () = 0 && real () <> 0 then set 0 in
  promote ~real:Unix.getuid ~effective:Unix.geteuid ~set:Unix.setuid;
  promote ~real:Unix.getgid ~effective:Unix.getegid ~set:Unix.setgid;
  { config; uid = 1000; gid = 1000 }
(* Nothing to tear down for the Linux backend. *)
let deinit ~t:_ = ()

(* Accessor for the configuration captured by [init]. *)
let config ~t = t.config
(* Hash identifying a build layer: an MD5 digest over the opam hashes of
   the effective parts of every dependency's opam file, in order. *)
let layer_hash ~t deps =
  let hash_of_dep opam =
    Util.opam_file t.config.opam_repositories opam
    |> Option.get
    |> OpamFile.OPAM.effective_part
    |> OpamFile.OPAM.write_to_string
    |> OpamHash.compute_from_string
    |> OpamHash.to_string
  in
  deps |> List.map hash_of_dep |> String.concat " " |> Digest.string |> Digest.to_hex
(* Hash identifying a documentation layer: combines the package's build
   hash, its dependencies' doc hashes, the doc tool (driver/odoc) hashes
   and the blessed/universe discriminator. *)
let doc_layer_hash ~t ~build_hash ~dep_doc_hashes ~ocaml_version ~blessed =
  let parts =
    (build_hash :: dep_doc_hashes)
    @ [
        Doc_tools.get_driver_hash ~config:t.config;
        Doc_tools.get_odoc_hash ~config:t.config ~ocaml_version;
        (if blessed then "blessed" else "universe");
      ]
  in
  Digest.to_hex (Digest.string (String.concat " " parts))
(** Dispatch a build-environment run to the OS-family-specific backend.
    Only the "debian" family (via [Docker.debian]) is implemented;
    any other family raises [Failure]. *)
let run ~t ~temp_dir opam_repository build_log =
  let config = t.config in
  match config.os_family with
  | "debian" -> Docker.debian ~config ~temp_dir opam_repository build_log t.uid t.gid
  | os_family ->
      failwith (Printf.sprintf "Unsupported OS family '%s' for Linux container. Currently supported: debian" os_family)
(** Build [pkg] in a runc container whose rootfs is an overlay of the base
    layer plus the cached layers named by [ordered_hashes].
    Returns the container exit status; returns 1 early if assembling the
    lower directory or mounting the overlay fails. Stdout/stderr of the
    build go to [build_log]. *)
let build ~t ~temp_dir build_log pkg ordered_hashes =
  let config = t.config in
  let os_key = Config.os_key ~config in
  let lowerdir = Path.(temp_dir / "lower") in
  (* Named "fs" because the upperdir (the overlay delta) is what gets
     persisted as the new layer's filesystem. *)
  let upperdir = Path.(temp_dir / "fs") in
  let workdir = Path.(temp_dir / "work") in
  let rootfsdir = Path.(temp_dir / "rootfs") in
  let () = List.iter Os.mkdir [ lowerdir; upperdir; workdir; rootfsdir ] in
  let pkg_string = OpamPackage.to_string pkg in
  (* When building the target package itself from a local source tree,
     pin it from $HOME/src and build inside that directory. *)
  let pin = if OpamPackage.name_to_string pkg = config.package then [ "opam pin -yn " ^ pkg_string ^ " $HOME/src/"; "cd src" ] else [] in
  let with_test = if config.with_test && OpamPackage.name_to_string pkg = config.package then "--with-test " else "" in
  let argv = [ "/usr/bin/env"; "bash"; "-c"; String.concat " && " (pin @ [ "opam-build -v " ^ with_test ^ pkg_string ]) ] in
  (* Assemble the lower directory by hard-linking each dependency layer in
     order; "cp -n" means earlier layers take precedence on conflicts. *)
  let copy_failed = ref false in
  let () =
    List.iter
      (fun hash ->
        if not !copy_failed then
          let src = Path.(config.dir / os_key / hash / "fs") in
          let r =
            Os.sudo ~stderr:"/dev/null"
              [
                "cp";
                "-n";
                "--archive";
                "--no-dereference";
                "--recursive";
                "--link";
                "--no-target-directory";
                src;
                lowerdir;
              ]
          in
          if r <> 0 then copy_failed := true)
      ordered_hashes
  in
  if !copy_failed then 1
  else
    (* Regenerate the opam switch-state file to match the merged set of
       installed packages from all layers. *)
    let () =
      let packages_dir = Path.(lowerdir / "home" / "opam" / ".opam" / "default" / ".opam-switch" / "packages") in
      let state_file = Path.(upperdir / "home" / "opam" / ".opam" / "default" / ".opam-switch" / "switch-state") in
      if Sys.file_exists packages_dir then Opamh.dump_state packages_dir state_file
    in
    let () =
      (* Chown /home (not just /home/opam) so overlay permissions are correct *)
      let home_dir = Path.(upperdir / "home") in
      if Sys.file_exists home_dir then ignore (Os.sudo [ "chown"; "-R"; string_of_int t.uid ^ ":" ^ string_of_int t.gid; home_dir ])
    in
    let etc_hosts = Path.(temp_dir / "hosts") in
    let () = Os.write_to_file etc_hosts ("127.0.0.1 localhost " ^ hostname) in
    let ld = "lowerdir=" ^ String.concat ":" [ lowerdir; Path.(config.dir / os_key / "base" / "fs") ] in
    let ud = "upperdir=" ^ upperdir in
    let wd = "workdir=" ^ workdir in
    let mount_result = Os.sudo ~stderr:"/dev/null" [ "mount"; "-t"; "overlay"; "overlay"; rootfsdir; "-o"; String.concat "," [ ld; ud; wd ] ] in
    if mount_result <> 0 then begin
      (* Mount failed - return error instead of trying to run runc *)
      1
    end
    else
      let mounts =
        [
          { Mount.ty = "bind"; src = Path.(temp_dir / "opam-repository"); dst = "/home/opam/.opam/repo/default"; options = [ "rbind"; "rprivate" ] };
          { ty = "bind"; src = etc_hosts; dst = "/etc/hosts"; options = [ "ro"; "rbind"; "rprivate" ] };
        ]
      in
      (* Optionally bind-mount the local source directory for pinned builds. *)
      let mounts =
        match config.directory with
        | None -> mounts
        | Some src -> mounts @ [ { ty = "bind"; src; dst = "/home/opam/src"; options = [ "rw"; "rbind"; "rprivate" ] } ]
      in
      let config_runc = make ~root:rootfsdir ~cwd:"/home/opam" ~argv ~hostname ~uid:t.uid ~gid:t.gid ~env ~mounts ~network:true in
      let () = Os.write_to_file Path.(temp_dir / "config.json") (Yojson.Safe.pretty_to_string config_runc) in
      let container_id = Filename.basename temp_dir in
      (* Clean up any stale container with same ID from previous runs *)
      let _ = Os.sudo ~stdout:"/dev/null" ~stderr:"/dev/null" [ "runc"; "delete"; "-f"; container_id ] in
      let result = Os.sudo ~stdout:build_log ~stderr:build_log [ "runc"; "run"; "-b"; temp_dir; container_id ] in
      let _ = Os.sudo ~stdout:"/dev/null" ~stderr:"/dev/null" [ "runc"; "delete"; "-f"; container_id ] in
      let _ = Os.sudo ~stderr:"/dev/null" [ "umount"; rootfsdir ] in
      (* Drop scratch dirs and build artefacts that must not be persisted in
         the layer (sources, build trees, package cache). *)
      let _ =
        Os.sudo
          [
            "rm";
            "-rf";
            lowerdir;
            workdir;
            rootfsdir;
            Path.(upperdir / "tmp");
            Path.(upperdir / "home" / "opam" / ".opam" / "default" / ".opam-switch" / "sources");
            Path.(upperdir / "home" / "opam" / ".opam" / "default" / ".opam-switch" / "build");
            Path.(upperdir / "home" / "opam" / ".opam" / "default" / ".opam-switch" / "packages" / "cache");
          ]
      in
      (* Shell glob is needed for the state cache pattern, hence sh -c. *)
      let _ = Os.sudo [ "sh"; "-c"; ("rm -f " ^ Path.(upperdir / "home" / "opam" / ".opam" / "repo" / "state-*.cache")) ] in
      result
(** Build a doc tools layer using runc.
    This is built directly on the base layer without any compiler layers.
    Takes a build_script parameter to support both driver and odoc layers.
    Returns the exit status of the build; returns 1 early if the overlay
    mount fails (matching [build]), instead of running runc on an
    unmounted rootfs. *)
let build_doc_tools_layer ~t ~temp_dir ~build_script build_log =
  let config = t.config in
  let os_key = Config.os_key ~config in
  let upperdir = Path.(temp_dir / "fs") in
  let workdir = Path.(temp_dir / "work") in
  let rootfsdir = Path.(temp_dir / "rootfs") in
  let () = List.iter Os.mkdir [ upperdir; workdir; rootfsdir ] in
  let argv = [ "/usr/bin/env"; "bash"; "-c"; build_script ] in
  let etc_hosts = Path.(temp_dir / "hosts") in
  let () = Os.write_to_file etc_hosts ("127.0.0.1 localhost " ^ hostname) in
  (* Build directly on base layer - no compiler layers needed *)
  let ld = "lowerdir=" ^ Path.(config.dir / os_key / "base" / "fs") in
  let ud = "upperdir=" ^ upperdir in
  let wd = "workdir=" ^ workdir in
  (* Fix: check the mount result, as [build] and [run_jtw_in_container] do.
     Previously the status was discarded, so a failed mount led to runc
     running against an empty/incorrect rootfs. *)
  let mount_result =
    Os.sudo ~stderr:"/dev/null" [ "mount"; "-t"; "overlay"; "overlay"; rootfsdir; "-o"; String.concat "," [ ld; ud; wd ] ]
  in
  if mount_result <> 0 then 1
  else
    let mounts =
      [
        {
          Mount.ty = "bind";
          src = Path.(temp_dir / "opam-repository");
          dst = "/home/opam/.opam/repo/default";
          options = [ "rbind"; "rprivate" ];
        };
        { ty = "bind"; src = etc_hosts; dst = "/etc/hosts"; options = [ "ro"; "rbind"; "rprivate" ] };
      ]
    in
    let config_runc =
      make ~root:rootfsdir ~cwd:"/home/opam" ~argv ~hostname ~uid:t.uid
        ~gid:t.gid ~env ~mounts ~network:true
    in
    let () =
      Os.write_to_file Path.(temp_dir / "config.json") (Yojson.Safe.pretty_to_string config_runc)
    in
    let container_id = Filename.basename temp_dir in
    (* Clean up any stale container with same ID from previous runs *)
    let _ = Os.sudo ~stdout:"/dev/null" ~stderr:"/dev/null" [ "runc"; "delete"; "-f"; container_id ] in
    let result =
      Os.sudo ~stdout:build_log ~stderr:build_log
        [ "runc"; "run"; "-b"; temp_dir; container_id ]
    in
    let _ = Os.sudo ~stdout:"/dev/null" ~stderr:"/dev/null" [ "runc"; "delete"; "-f"; container_id ] in
    let _ = Os.sudo ~stderr:"/dev/null" [ "umount"; rootfsdir ] in
    (* Remove scratch dirs and artefacts that must not persist in the layer. *)
    let _ =
      Os.sudo
        [
          "rm";
          "-rf";
          workdir;
          rootfsdir;
          Path.(upperdir / "tmp");
          Path.(upperdir / "home" / "opam" / ".opam" / "default" / ".opam-switch" / "sources");
          Path.(upperdir / "home" / "opam" / ".opam" / "default" / ".opam-switch" / "build");
          Path.(upperdir / "home" / "opam" / ".opam" / "default" / ".opam-switch" / "packages" / "cache");
        ]
    in
    (* Shell glob is needed for the state cache pattern, hence sh -c. *)
    let _ =
      Os.sudo
        [ "sh"; "-c"; "rm -f " ^ Path.(upperdir / "home" / "opam" / ".opam" / "repo" / "state-*.cache") ]
    in
    result
(** Ensure the shared driver layer exists and is built.
    Contains odoc_driver_voodoo, sherlodoc, odoc-md - built once with OCaml 5.x.
    Returns the layer directory path if successful, None if build failed. *)
let ensure_driver_layer ~t : string option =
  let config = t.config in
  let layer_dir = Doc_tools.driver_layer_path ~config in
  let layer_json = Path.(layer_dir / "layer.json") in
  (* Callback invoked while holding the exclusive creation lock. *)
  let build_layer ~set_temp_log_path target_dir =
    let temp_dir = Os.temp_dir ~perms:0o755 ~parent_dir:config.dir "temp-doc-driver-" "" in
    let build_log = Path.(temp_dir / "build.log") in
    set_temp_log_path build_log;
    Unix.symlink (List.hd config.opam_repositories) Path.(temp_dir / "opam-repository");
    let status = build_doc_tools_layer ~t ~temp_dir ~build_script:(Doc_tools.driver_build_script ~config) build_log in
    Os.safe_rename_dir ~marker_file:layer_json temp_dir target_dir;
    (* The driver layer is not tied to a real package; record a dummy one. *)
    Util.save_layer_info layer_json (OpamPackage.of_string "doc-driver.0") [] [] status
  in
  (if not (Sys.file_exists layer_json) then
     let lock_info =
       Os.
         {
           cache_dir = config.dir;
           stage = `Tool;
           package = "driver";
           version = "0";
           universe = None;
           layer_name = Some (Doc_tools.driver_layer_name ~config);
         }
     in
     Os.create_directory_exclusively ~marker_file:layer_json ~lock_info layer_dir build_layer);
  match Util.load_layer_info_exit_status layer_json with 0 -> Some layer_dir | _ -> None
(** Ensure the per-version odoc layer exists and is built.
    Contains odoc built with the specified OCaml version.
    Returns the layer directory path if successful, None if build failed. *)
let ensure_odoc_layer ~t ~ocaml_version : string option =
  let config = t.config in
  let layer_dir = Doc_tools.odoc_layer_path ~config ~ocaml_version in
  let layer_json = Path.(layer_dir / "layer.json") in
  (* Callback invoked while holding the exclusive creation lock. *)
  let build_layer ~set_temp_log_path target_dir =
    let temp_dir = Os.temp_dir ~perms:0o755 ~parent_dir:config.dir "temp-doc-odoc-" "" in
    let build_log = Path.(temp_dir / "build.log") in
    set_temp_log_path build_log;
    Unix.symlink (List.hd config.opam_repositories) Path.(temp_dir / "opam-repository");
    let status =
      build_doc_tools_layer ~t ~temp_dir ~build_script:(Doc_tools.odoc_build_script ~config ~ocaml_version) build_log
    in
    Os.safe_rename_dir ~marker_file:layer_json temp_dir target_dir;
    (* The odoc layer is not tied to a real package; record a dummy one. *)
    Util.save_layer_info layer_json (OpamPackage.of_string "doc-odoc.0") [] [] status
  in
  (if not (Sys.file_exists layer_json) then
     let lock_info =
       Os.
         {
           cache_dir = config.dir;
           stage = `Tool;
           package = "odoc";
           version = "0";
           universe = Some (OpamPackage.Version.to_string (OpamPackage.version ocaml_version));
           layer_name = Some (Doc_tools.odoc_layer_name ~config ~ocaml_version);
         }
     in
     Os.create_directory_exclusively ~marker_file:layer_json ~lock_info layer_dir build_layer);
  match Util.load_layer_info_exit_status layer_json with 0 -> Some layer_dir | _ -> None
(** Run odoc_driver_voodoo in a container.

    This runs odoc_driver_voodoo which:
    - Finds packages in prep/universes/{u}/{pkg}/{v}/
    - Compiles .cmti/.cmt to .odoc with marker files
    - Links and generates HTML
    - Uses marker files to find dependencies

    Compile output goes to /home/opam/compile inside the container's fs.
    Since the fs is an overlay of all dep layers, odoc_driver can find
    dependencies' .odoc files. New .odoc files are captured by the overlay's
    upperdir and end up in layer/fs/home/opam/compile/.

    Returns the container exit status (1 early if the overlay mount fails). *)
let run_odoc_driver_voodoo ~t ~temp_dir ~build_log ~build_layer_dir ~doc_layer_dir ~pkg ~universe ~blessed ~dep_doc_hashes ~actions ~html_output ~ocaml_version =
  let config = t.config in
  let os_key = Config.os_key ~config in
  let lowerdir = Path.(temp_dir / "lower") in
  let upperdir = Path.(temp_dir / "upper") in
  let workdir = Path.(temp_dir / "work") in
  let rootfsdir = Path.(temp_dir / "rootfs") in
  let () = List.iter Os.mkdir [ lowerdir; upperdir; workdir; rootfsdir ] in
  (* Chown upperdir and workdir so overlay operations work correctly *)
  let uid_gid = Printf.sprintf "%d:%d" t.uid t.gid in
  let () = ignore (Os.sudo [ "chown"; uid_gid; upperdir; workdir ]) in
  (* Create workdir with prep structure *)
  let container_workdir_host = Path.(temp_dir / "workdir") in
  Os.mkdir ~parents:true container_workdir_host;
  (* Create html output directory - use shared html_output if provided *)
  Os.mkdir ~parents:true html_output;
  (* Just ensure html_output root is accessible - no recursive chown before container runs *)
  ignore (Os.sudo [ "chown"; uid_gid; html_output ]);
  (* Create dedicated directory for doc tool binaries to avoid conflicts with
     any odoc binary that might be installed in the target package's build layer.
     We copy the specific binaries from doc tool layers here. *)
  let doc_tools_bin_host = Path.(lowerdir / "home" / "opam" / "doc-tools" / "bin") in
  Os.mkdir ~parents:true doc_tools_bin_host;
  (* Paths to doc tool binaries in their respective layers *)
  let odoc_layer = Doc_tools.odoc_layer_path ~config ~ocaml_version in
  let driver_layer = Doc_tools.driver_layer_path ~config in
  let odoc_src = Path.(odoc_layer / "fs" / "home" / "opam" / ".opam" / "default" / "bin" / "odoc") in
  let odoc_md_src = Path.(driver_layer / "fs" / "home" / "opam" / ".opam" / "default" / "bin" / "odoc-md") in
  (* Container paths for the copied binaries *)
  let odoc_bin = "/home/opam/doc-tools/bin/odoc" in
  let odoc_md_bin = "/home/opam/doc-tools/bin/odoc-md" in
  (* Copy binaries to the dedicated directory (use cp, not ln, to ensure they exist) *)
  let () =
    if Sys.file_exists odoc_src then
      ignore (Os.sudo [ "cp"; "--archive"; odoc_src; Path.(doc_tools_bin_host / "odoc") ])
  in
  let () =
    if Sys.file_exists odoc_md_src then
      ignore (Os.sudo [ "cp"; "--archive"; odoc_md_src; Path.(doc_tools_bin_host / "odoc-md") ])
  in
  let argv =
    [
      "/usr/bin/env";
      "bash";
      "-c";
      Odoc_gen.odoc_driver_voodoo_command ~pkg ~universe ~blessed ~actions ~odoc_bin ~odoc_md_bin;
    ]
  in
  (* Build the lower directory from:
     1. Target package's build layer fs/ (for .cmti files etc)
     2. Dependency doc layers' fs/ (for compiled .odoc files)
     3. Doc tools layers (odoc + driver)
     Order matters: with cp -n, first layer's files take precedence. *)
  let target_build_fs = Path.(build_layer_dir / "fs") in
  if Sys.file_exists target_build_fs then
    ignore
      (Os.sudo ~stderr:"/dev/null"
         [
           "cp";
           "-n";
           "--archive";
           "--no-dereference";
           "--recursive";
           "--link";
           "--no-target-directory";
           target_build_fs;
           lowerdir;
         ]);
  (* For link-and-gen phase: include the current package's doc layer fs/ that was
     created during compile-only phase. This contains the compiled .odoc files. *)
  let () =
    if actions = "link-and-gen" then begin
      let own_doc_fs = Path.(doc_layer_dir / "fs") in
      if Sys.file_exists own_doc_fs then
        ignore
          (Os.sudo ~stderr:"/dev/null"
             [
               "cp";
               "-n";
               "--archive";
               "--no-dereference";
               "--recursive";
               "--link";
               "--no-target-directory";
               own_doc_fs;
               lowerdir;
             ])
    end
  in
  (* Copy dependency doc layers' fs/ (these contain compile/ output with .odoc files) *)
  let doc_tool_hashes = [ Doc_tools.get_odoc_hash ~config ~ocaml_version; Doc_tools.get_driver_hash ~config ] in
  let () =
    List.iter
      (fun hash ->
        let layer_fs = Path.(config.dir / os_key / hash / "fs") in
        if Sys.file_exists layer_fs then
          ignore
            (Os.sudo ~stderr:"/dev/null"
               [
                 "cp";
                 "-n";
                 "--archive";
                 "--no-dereference";
                 "--recursive";
                 "--link";
                 "--no-target-directory";
                 layer_fs;
                 lowerdir;
               ]))
      (dep_doc_hashes @ doc_tool_hashes)
  in
  let etc_hosts = Path.(temp_dir / "hosts") in
  let () = Os.write_to_file etc_hosts ("127.0.0.1 localhost " ^ hostname) in
  let ld =
    "lowerdir=" ^ String.concat ":" [ lowerdir; Path.(config.dir / os_key / "base" / "fs") ]
  in
  let ud = "upperdir=" ^ upperdir in
  let wd = "workdir=" ^ workdir in
  let mount_result =
    Os.sudo ~stderr:"/dev/null"
      [ "mount"; "-t"; "overlay"; "overlay"; rootfsdir; "-o"; String.concat "," [ ld; ud; wd ] ]
  in
  if mount_result <> 0 then 1
  else
    (* Mount directories:
       - /workdir as a writable workspace (for _mld, _index temp dirs)
       - /workdir/prep with entire prep structure (read-only, mounted on top of workdir)
       - /html for output
       Note: compile output goes to /home/opam/compile inside the fs overlay *)
    let prep_dir = Path.(doc_layer_dir / "prep") in
    (* Create prep mount point BEFORE chown - otherwise we can't create dirs if uid != 1000 *)
    let () = Os.mkdir ~parents:true Path.(container_workdir_host / "prep") in
    let () = ignore (Os.sudo [ "chown"; "-R"; uid_gid; container_workdir_host ]) in
    let mounts =
      [
        (* Mount the workdir as writable - this allows _mld and _index directories to be created *)
        { Mount.ty = "bind"; src = container_workdir_host; dst = Odoc_gen.container_workdir; options = [ "rw"; "rbind"; "rprivate" ] };
        (* Mount prep directory on top - this overlays the empty prep mount point *)
        { ty = "bind"; src = prep_dir; dst = Odoc_gen.container_workdir ^ "/prep"; options = [ "ro"; "rbind"; "rprivate" ] };
        { ty = "bind"; src = html_output; dst = Odoc_gen.container_html_output; options = [ "rw"; "rbind"; "rprivate" ] };
        { ty = "bind"; src = etc_hosts; dst = "/etc/hosts"; options = [ "ro"; "rbind"; "rprivate" ] };
      ]
    in
    (* Add opam bin directory to PATH for odoc_driver_voodoo *)
    let odoc_env =
      List.map
        (fun (k, v) ->
          if k = "PATH" then (k, "/home/opam/.opam/default/bin:" ^ v)
          else (k, v))
        env
    in
    let config_runc =
      make ~root:rootfsdir ~cwd:Odoc_gen.container_workdir ~argv ~hostname ~uid:t.uid
        ~gid:t.gid ~env:odoc_env ~mounts ~network:false
    in
    let () =
      Os.write_to_file Path.(temp_dir / "config.json") (Yojson.Safe.pretty_to_string config_runc)
    in
    let container_id = "odoc-voodoo-" ^ Filename.basename temp_dir in
    (* Clean up any stale container with same ID from previous runs *)
    let _ = Os.sudo ~stdout:"/dev/null" ~stderr:"/dev/null" [ "runc"; "delete"; "-f"; container_id ] in
    let result =
      Os.sudo ~stdout:build_log ~stderr:build_log
        [ "runc"; "run"; "-b"; temp_dir; container_id ]
    in
    let _ = Os.sudo ~stdout:"/dev/null" ~stderr:"/dev/null" [ "runc"; "delete"; "-f"; container_id ] in
    (* Unmount overlay *)
    let _ = Os.sudo ~stderr:"/dev/null" [ "umount"; rootfsdir ] in
    (* Copy compile output from upperdir to doc layer's fs.
       The upperdir captures all filesystem changes made in the container.
       We need to persist the /home/opam/compile directory in the doc layer. *)
    let upper_compile = Path.(upperdir / "home" / "opam" / "compile") in
    let doc_compile = Path.(doc_layer_dir / "fs" / "home" / "opam" / "compile") in
    let () =
      if Sys.file_exists upper_compile then begin
        Os.mkdir ~parents:true (Filename.dirname doc_compile);
        (* Remove existing compile dir to avoid "File exists" errors on rerun *)
        if Sys.file_exists doc_compile then
          Os.sudo_rm_rf doc_compile;
        ignore (Os.sudo [ "cp"; "-a"; upper_compile; doc_compile ])
      end
    in
    (* Clean up temp directories *)
    let _ = Os.sudo [ "rm"; "-rf"; lowerdir; workdir; rootfsdir; upperdir; container_workdir_host ] in
    (* Clean up prep folder for this package only after SUCCESSFUL final phase.
       For separate phases: compile-only runs first, then link-and-gen.
       Only delete prep after successful link-and-gen (or "all" for single-phase).
       If link-and-gen failed (e.g. x-extra-doc-deps not yet built), keep prep
       so global_deferred_doc_link can try again later. *)
    let is_final_phase = actions = "all" || actions = "link-and-gen" in
    let link_succeeded =
      if not is_final_phase then false
      else
        (* Check if the log file is substantial (> 1KB indicates actual work done) *)
        let log_file = Path.(doc_layer_dir / Printf.sprintf "odoc-voodoo-%s.log" actions) in
        try
          let st = Unix.stat log_file in
          result = 0 && st.Unix.st_size > 1000
        with _ -> result = 0
    in
    let () =
      if is_final_phase && link_succeeded then begin
        let pkg_name = OpamPackage.name_to_string pkg in
        let pkg_version = OpamPackage.version_to_string pkg in
        let prep_pkg_dir = Path.(doc_layer_dir / "prep" / "universes" / universe / pkg_name / pkg_version) in
        (* Best-effort removal: prep may already be gone. *)
        try Os.rm ~recursive:true prep_pkg_dir with _ -> ()
      end
    in
    (* Chown only the specific package's html directory after container finishes *)
    let pkg_html_dir = Path.(html_output / "p" / OpamPackage.name_to_string pkg) in
    if Sys.file_exists pkg_html_dir then
      ignore (Os.sudo [ "chown"; "-R"; uid_gid; pkg_html_dir ]);
    result
(* Hash identifying a jtw layer: derived from the package's build hash and
   the hash of the jtw-tools layer for this OCaml version. *)
let jtw_layer_hash ~t ~build_hash ~ocaml_version =
  Jtw_gen.compute_jtw_layer_hash ~build_hash
    ~jtw_tools_hash:(Jtw_tools.get_hash ~config:t.config ~ocaml_version)
(** Ensure the jtw-tools layer exists and is built.
    Contains js_of_ocaml and js_top_worker built with the specified OCaml version.
    Returns the layer directory path if successful, None if build failed. *)
let ensure_jtw_tools_layer ~t ~ocaml_version : string option =
  let config = t.config in
  let layer_dir = Jtw_tools.layer_path ~config ~ocaml_version in
  let layer_json = Path.(layer_dir / "layer.json") in
  (* Callback invoked while holding the exclusive creation lock. *)
  let build_layer ~set_temp_log_path target_dir =
    let temp_dir = Os.temp_dir ~perms:0o755 ~parent_dir:config.dir "temp-jtw-tools-" "" in
    let build_log = Path.(temp_dir / "build.log") in
    set_temp_log_path build_log;
    Unix.symlink (List.hd config.opam_repositories) Path.(temp_dir / "opam-repository");
    let status =
      build_doc_tools_layer ~t ~temp_dir ~build_script:(Jtw_tools.build_script ~config ~ocaml_version) build_log
    in
    Os.safe_rename_dir ~marker_file:layer_json temp_dir target_dir;
    (* The jtw-tools layer is not tied to a real package; record a dummy one. *)
    Util.save_layer_info layer_json (OpamPackage.of_string "jtw-tools.0") [] [] status
  in
  (if not (Sys.file_exists layer_json) then
     let lock_info =
       Os.
         {
           cache_dir = config.dir;
           stage = `Tool;
           package = "jtw-tools";
           version = "0";
           universe = Some (OpamPackage.Version.to_string (OpamPackage.version ocaml_version));
           layer_name = Some (Jtw_tools.layer_name ~config ~ocaml_version);
         }
     in
     Os.create_directory_exclusively ~marker_file:layer_json ~lock_info layer_dir build_layer);
  match Util.load_layer_info_exit_status layer_json with 0 -> Some layer_dir | _ -> None
(** Run jtw generation in a container: compile .cma -> .cma.js, copy .cmi, META *)
(* Orchestration:
   1. Assemble an overlayfs lower layer by hard-linking (cp --link) the
      package's build layer, its dependencies' build layers, and the
      jtw-tools layer on top of each other ("-n" keeps the first file won).
   2. Mount the overlay, run the jtw compile script under runc, unmount.
   3. Harvest outputs (compiled lib/, plus .cmi and META from the build
      layer) into [jtw_layer_dir] and generate dynamic_cmis.json indexes.
   Returns the container's exit status (non-zero = failure; 1 if the
   overlay mount itself failed). *)
let run_jtw_in_container ~t ~temp_dir ~build_log ~build_layer_dir ~jtw_layer_dir ~dep_build_hashes ~pkg ~installed_libs ~ocaml_version =
  let config = t.config in
  let os_key = Config.os_key ~config in
  (* Standard overlayfs layout under temp_dir: lower (read-only stack),
     upper (container writes), work (overlayfs scratch), rootfs (merged). *)
  let lowerdir = Path.(temp_dir / "lower") in
  let upperdir = Path.(temp_dir / "upper") in
  let workdir = Path.(temp_dir / "work") in
  let rootfsdir = Path.(temp_dir / "rootfs") in
  let () = List.iter Os.mkdir [ lowerdir; upperdir; workdir; rootfsdir ] in
  let uid_gid = Printf.sprintf "%d:%d" t.uid t.gid in
  let () = ignore (Os.sudo [ "chown"; uid_gid; upperdir; workdir ]) in
  (* Build script to compile .cma files *)
  let script = Jtw_gen.jtw_container_script ~pkg ~installed_libs in
  let argv = [ "/usr/bin/env"; "bash"; "-c"; script ] in
  (* Build lower directory from build layer + dependency build layers + jtw-tools layer *)
  let target_build_fs = Path.(build_layer_dir / "fs") in
  if Sys.file_exists target_build_fs then
    ignore (Os.sudo ~stderr:"/dev/null"
      ["cp"; "-n"; "--archive"; "--no-dereference"; "--recursive"; "--link"; "--no-target-directory"; target_build_fs; lowerdir]);
  (* Copy dependency build layers *)
  List.iter (fun hash ->
    let layer_fs = Path.(config.dir / os_key / hash / "fs") in
    if Sys.file_exists layer_fs then
      ignore (Os.sudo ~stderr:"/dev/null"
        ["cp"; "-n"; "--archive"; "--no-dereference"; "--recursive"; "--link"; "--no-target-directory"; layer_fs; lowerdir])
  ) dep_build_hashes;
  (* Copy jtw-tools layer *)
  let jtw_tools_hash = Jtw_tools.get_hash ~config ~ocaml_version in
  let jtw_tools_fs = Path.(config.dir / os_key / jtw_tools_hash / "fs") in
  if Sys.file_exists jtw_tools_fs then
    ignore (Os.sudo ~stderr:"/dev/null"
      ["cp"; "-n"; "--archive"; "--no-dereference"; "--recursive"; "--link"; "--no-target-directory"; jtw_tools_fs; lowerdir]);
  (* Create output directory in container *)
  let jtw_output_host = Path.(temp_dir / "jtw-output") in
  Os.mkdir ~parents:true jtw_output_host;
  ignore (Os.sudo [ "chown"; uid_gid; jtw_output_host ]);
  let etc_hosts = Path.(temp_dir / "hosts") in
  let () = Os.write_to_file etc_hosts ("127.0.0.1 localhost " ^ hostname) in
  (* Base OS layer goes at the bottom of the overlay stack. *)
  let ld = "lowerdir=" ^ String.concat ":" [ lowerdir; Path.(config.dir / os_key / "base" / "fs") ] in
  let ud = "upperdir=" ^ upperdir in
  let wd = "workdir=" ^ workdir in
  let mount_result = Os.sudo ~stderr:"/dev/null"
    [ "mount"; "-t"; "overlay"; "overlay"; rootfsdir; "-o"; String.concat "," [ ld; ud; wd ] ] in
  if mount_result <> 0 then 1
  else begin
    let mounts = [
      { Mount.ty = "bind"; src = jtw_output_host; dst = "/home/opam/jtw-output"; options = [ "rw"; "rbind"; "rprivate" ] };
      { ty = "bind"; src = etc_hosts; dst = "/etc/hosts"; options = [ "ro"; "rbind"; "rprivate" ] };
    ] in
    (* Prepend the opam switch's bin dir so jtw tooling is found first. *)
    let jtw_env = List.map (fun (k, v) ->
      if k = "PATH" then (k, "/home/opam/.opam/default/bin:" ^ v) else (k, v)
    ) env in
    let config_runc = make ~root:rootfsdir ~cwd:"/home/opam" ~argv ~hostname ~uid:t.uid ~gid:t.gid ~env:jtw_env ~mounts ~network:false in
    let () = Os.write_to_file Path.(temp_dir / "config.json") (Yojson.Safe.pretty_to_string config_runc) in
    let container_id = "jtw-" ^ Filename.basename temp_dir in
    (* Delete any stale container of the same name before and after the run. *)
    let _ = Os.sudo ~stdout:"/dev/null" ~stderr:"/dev/null" [ "runc"; "delete"; "-f"; container_id ] in
    let result = Os.sudo ~stdout:build_log ~stderr:build_log [ "runc"; "run"; "-b"; temp_dir; container_id ] in
    let _ = Os.sudo ~stdout:"/dev/null" ~stderr:"/dev/null" [ "runc"; "delete"; "-f"; container_id ] in
    let _ = Os.sudo ~stderr:"/dev/null" [ "umount"; rootfsdir ] in
    (* Copy output from container to jtw layer *)
    let jtw_output_lib = Path.(jtw_output_host / "lib") in
    if Sys.file_exists jtw_output_lib then begin
      let jtw_layer_lib = Path.(jtw_layer_dir / "lib") in
      Os.mkdir ~parents:true (Filename.dirname jtw_layer_lib);
      ignore (Os.sudo [ "cp"; "-a"; jtw_output_lib; jtw_layer_lib ]);
      (* Fix ownership so subsequent writes (cmi, META, dynamic_cmis.json) work *)
      let uid_gid = Printf.sprintf "%d:%d" (Unix.getuid ()) (Unix.getgid ()) in
      ignore (Os.sudo [ "chown"; "-R"; uid_gid; jtw_layer_lib ])
    end;
    (* Also copy .cmi and META from the build layer to the jtw layer *)
    let build_lib = Path.(build_layer_dir / "fs" / "home" / "opam" / ".opam" / "default" / "lib") in
    List.iter (fun rel_path ->
      if Filename.check_suffix rel_path ".cmi" || Filename.basename rel_path = "META" then begin
        let src = Path.(build_lib / rel_path) in
        let dst = Path.(jtw_layer_dir / "lib" / rel_path) in
        if Sys.file_exists src then begin
          Os.mkdir ~parents:true (Filename.dirname dst);
          (* Best-effort copy: a failed .cmi copy should not fail the run. *)
          (try Os.cp src dst with _ -> ())
        end
      end
    ) installed_libs;
    (* Generate dynamic_cmis.json for each lib subdirectory that has .cmi files *)
    let jtw_lib_dir = Path.(jtw_layer_dir / "lib") in
    if Sys.file_exists jtw_lib_dir then begin
      (* Recursive walk; [rel] is the path relative to [base] ("" at the root). *)
      let rec scan_dirs base rel =
        let full = if rel = "" then base else Path.(base / rel) in
        if Sys.file_exists full && Sys.is_directory full then begin
          let entries = try Sys.readdir full |> Array.to_list with _ -> [] in
          let cmi_files = List.filter (fun f -> Filename.check_suffix f ".cmi") entries in
          if cmi_files <> [] then begin
            let dcs_url = "lib/" ^ rel in
            let dcs_json = Jtw_gen.generate_dynamic_cmis_json ~dcs_url cmi_files in
            Os.write_to_file Path.(full / "dynamic_cmis.json") dcs_json
          end;
          (* Recurse into subdirectories *)
          List.iter (fun name ->
            let sub = if rel = "" then name else rel ^ "/" ^ name in
            let sub_full = Path.(base / sub) in
            if Sys.file_exists sub_full && Sys.is_directory sub_full then
              scan_dirs base sub
          ) entries
        end
      in
      scan_dirs jtw_lib_dir ""
    end;
    (* Clean up *)
    let _ = Os.sudo [ "rm"; "-rf"; lowerdir; workdir; rootfsdir; upperdir; jtw_output_host ] in
    result
  end
(** Generate jtw (js_of_ocaml) artifacts for [pkg] into [jtw_layer_dir].
    Returns [None] when jtw generation is disabled ([config.with_jtw] off),
    otherwise [Some json] describing success/failure/skip as a
    [Jtw_gen.jtw_result] serialized to Yojson.
    The container run happens in a fresh temp dir; its log is preserved as
    [jtw_layer_dir/jtw.log] before the temp dir is removed. *)
let generate_jtw ~t ~build_layer_dir ~jtw_layer_dir ~dep_build_hashes ~pkg ~installed_libs ~ocaml_version =
  let config = t.config in
  if not config.with_jtw then None
  else
    match ensure_jtw_tools_layer ~t ~ocaml_version with
    | Some _tools_dir ->
      (* Tools layer exists but may lack js_of_ocaml; fail explicitly. *)
      if not (Jtw_tools.has_jsoo ~config ~ocaml_version) then
        Some (Jtw_gen.jtw_result_to_yojson (Jtw_gen.Jtw_failure "js_of_ocaml not installed in jtw-tools layer"))
      else begin
        let temp_dir = Os.temp_dir ~perms:0o755 ~parent_dir:config.dir "temp-jtw-" "" in
        let build_log = Path.(temp_dir / "jtw.log") in
        (* Any exception from the container run is treated as exit status 1. *)
        let status =
          try
            run_jtw_in_container ~t ~temp_dir ~build_log ~build_layer_dir ~jtw_layer_dir ~dep_build_hashes ~pkg ~installed_libs ~ocaml_version
          with _ -> 1
        in
        (* Best-effort: keep the log alongside the layer, then drop temp dir. *)
        let layer_log = Path.(jtw_layer_dir / "jtw.log") in
        (try Os.cp build_log layer_log with _ -> ());
        (try Os.rm ~recursive:true temp_dir with _ -> ());
        if status = 0 then
          Some (Jtw_gen.jtw_result_to_yojson Jtw_gen.Jtw_success)
        else
          Some (Jtw_gen.jtw_result_to_yojson (Jtw_gen.Jtw_failure (Printf.sprintf "jtw generation exited with status %d" status)))
      end
    | None ->
      (* Could not provision the jtw-tools layer at all. *)
      Some (Jtw_gen.jtw_result_to_yojson Jtw_gen.Jtw_skipped)
(** Generate odoc documentation for [pkg].
    Returns [None] when doc generation is disabled ([config.with_doc] off),
    otherwise [Some json] for an [Odoc_gen.doc_result].
    [phase] selects which odoc_driver_voodoo actions run (all /
    compile-only / link-and-gen).  HTML output is written to a staging
    directory and atomically swapped into place only on success, so a
    failed rebuild leaves previously published docs intact
    ("graceful degradation"). *)
let generate_docs ~t ~build_layer_dir ~doc_layer_dir ~dep_doc_hashes ~pkg ~installed_libs ~installed_docs ~phase ~ocaml_version =
  let config = t.config in
  if not config.with_doc then None
  else
    (* Ensure both doc tool layers exist: shared driver layer + per-version odoc layer *)
    match ensure_driver_layer ~t, ensure_odoc_layer ~t ~ocaml_version with
    | Some _driver_dir, Some _odoc_dir ->
      (* Check if odoc_driver_voodoo is available in driver layer *)
      if not (Doc_tools.has_odoc_driver_voodoo ~config) then
        Some (Odoc_gen.doc_result_to_yojson (Odoc_gen.Doc_failure "odoc_driver_voodoo not installed in driver layer"))
      (* Check if odoc is available in odoc layer *)
      else if not (Doc_tools.has_odoc ~config ~ocaml_version) then
        Some (Odoc_gen.doc_result_to_yojson (Odoc_gen.Doc_failure "odoc not installed in odoc layer"))
      else begin
        (* Compute universe hash from dependency doc hashes.
           For link-only phase, use the ORIGINAL dep_doc_hashes from layer.json
           (the ones used when creating the prep structure during compile phase).
           The extended dep_doc_hashes (with x-extra-doc-deps) are only used for overlay. *)
        let universe =
          match phase with
          | S.Doc_link_only ->
            (* Read original dep_doc_hashes from layer.json to get same universe hash *)
            let layer_json = Path.(doc_layer_dir / "layer.json") in
            let original_dep_hashes = Util.load_layer_info_dep_doc_hashes layer_json in
            Odoc_gen.compute_universe_hash original_dep_hashes
          | S.Doc_all | S.Doc_compile_only ->
            Odoc_gen.compute_universe_hash dep_doc_hashes
        in
        (* Create prep structure for compile phases (not needed for link-only) *)
        let prep_result =
          match phase with
          | S.Doc_link_only ->
            (* For link-only, prep should already exist from compile phase *)
            Ok ()
          | S.Doc_all | S.Doc_compile_only ->
            try
              ignore (Odoc_gen.create_prep_structure ~source_layer_dir:build_layer_dir ~dest_layer_dir:doc_layer_dir ~universe ~pkg ~installed_libs ~installed_docs);
              Ok ()
            with exn -> Error (Printexc.to_string exn)
        in
        match prep_result with
        | Error msg ->
          let error = Printf.sprintf "Failed to create prep structure: %s" msg in
          Some (Odoc_gen.doc_result_to_yojson (Odoc_gen.Doc_failure error))
        | Ok () ->
          (* Change ownership of entire prep directory so container can read it *)
          let uid_gid = Printf.sprintf "%d:%d" t.uid t.gid in
          let prep_dir = Path.(doc_layer_dir / "prep") in
          if Sys.file_exists prep_dir then
            ignore (Os.sudo [ "chown"; "-R"; uid_gid; prep_dir ]);
          (* Determine blessing status from pre-computed map *)
          let blessed =
            match config.blessed_map with
            | Some map -> Blessing.is_blessed map pkg
            | None -> false
          in
          (* Determine HTML output directory - use shared if specified, else per-layer *)
          let final_html_output_dir = match config.html_output with
            | Some dir -> dir
            | None -> Path.(doc_layer_dir / "html")
          in
          (* For atomic swaps: use a staging directory, then swap on success.
             This implements "graceful degradation" - existing docs are only
             replaced when the new build succeeds. *)
          let staging_html_dir = Os.temp_dir ~perms:0o755 ~parent_dir:config.dir "temp-html-staging-" "" in
          let pkg_name = OpamPackage.name_to_string pkg in
          let pkg_version = OpamPackage.version_to_string pkg in
          (* Run odoc_driver_voodoo with appropriate phases, writing to staging *)
          let run_phase ~actions =
            let voodoo_temp_dir = Os.temp_dir ~perms:0o755 ~parent_dir:config.dir "temp-odoc-voodoo-" "" in
            let voodoo_log = Path.(voodoo_temp_dir / "voodoo.log") in
            let status =
              try
                (* Write to staging directory instead of final *)
                run_odoc_driver_voodoo ~t ~temp_dir:voodoo_temp_dir ~build_log:voodoo_log ~build_layer_dir ~doc_layer_dir ~pkg ~universe ~blessed ~dep_doc_hashes ~actions ~html_output:staging_html_dir ~ocaml_version
              with
              | _ -> 1
            in
            (* Preserve the per-phase log next to the layer before cleanup. *)
            let layer_log = Path.(doc_layer_dir / Printf.sprintf "odoc-voodoo-%s.log" actions) in
            (try Os.cp voodoo_log layer_log with _ -> ());
            (try Os.rm ~recursive:true voodoo_temp_dir with _ -> ());
            status
          in
          let voodoo_status =
            match phase with
            | S.Doc_all ->
              (* Run all phases at once *)
              run_phase ~actions:"all"
            | S.Doc_compile_only ->
              (* Run only compile phase - link will happen later after post deps built *)
              run_phase ~actions:"compile-only"
            | S.Doc_link_only ->
              (* Run only link and html-generate - post deps should now be available *)
              run_phase ~actions:"link-and-gen"
          in
          (* Handle result with atomic swap for graceful degradation *)
          let result =
            if voodoo_status = 0 then begin
              (* Success: atomically swap staging to final.
                 Blessed packages publish under p/<name>/<version>;
                 unblessed under u/<universe>/<name>/<version>. *)
              let staging_pkg_dir =
                if blessed then
                  Path.(staging_html_dir / "p" / pkg_name / pkg_version)
                else
                  Path.(staging_html_dir / "u" / universe / pkg_name / pkg_version)
              in
              let final_pkg_dir =
                if blessed then
                  Path.(final_html_output_dir / "p" / pkg_name / pkg_version)
                else
                  Path.(final_html_output_dir / "u" / universe / pkg_name / pkg_version)
              in
              (* Check if staging produced output *)
              if Sys.file_exists staging_pkg_dir then begin
                let final_pkg_parent = Filename.dirname final_pkg_dir in
                Os.mkdir ~parents:true final_pkg_parent;
                let old_dir = final_pkg_dir ^ ".old" in
                let has_existing = Sys.file_exists final_pkg_dir in
                (* Step 1: If final exists, move to .old *)
                let swap_ok = ref true in
                (if has_existing then begin
                  if Sys.file_exists old_dir then Os.sudo_rm_rf old_dir;
                  try Unix.rename final_pkg_dir old_dir with
                  | Unix.Unix_error (err, _, _) ->
                    Os.log "atomic swap: failed to rename %s to %s: %s" final_pkg_dir old_dir (Unix.error_message err);
                    swap_ok := false
                end);
                (* Step 2: Move staging to final *)
                if !swap_ok then begin
                  (try
                    (* Use sudo mv since container may have created root-owned files *)
                    let r = Os.sudo [ "mv"; staging_pkg_dir; final_pkg_dir ] in
                    if r <> 0 then swap_ok := false
                  with _ -> swap_ok := false)
                end;
                (* Step 3: Remove .old backup on success *)
                if !swap_ok then begin
                  if has_existing && Sys.file_exists old_dir then
                    Os.sudo_rm_rf old_dir;
                  (* Step 4: For blessed packages, write universes.json for GC tracking *)
                  if blessed then begin
                    try
                      let universes_json = Path.(final_pkg_dir / "universes.json") in
                      let json_content = Printf.sprintf {|{"universes": ["%s"]}|} universe in
                      Os.write_to_file universes_json json_content
                    with Sys_error err ->
                      Os.log "atomic swap: warning - failed to write universes.json: %s" err
                  end;
                  Os.log "atomic swap: successfully committed docs for %s/%s" pkg_name pkg_version;
                  Some (Odoc_gen.doc_result_to_yojson (Odoc_gen.Doc_success { html_path = final_html_output_dir; blessed }))
                end else begin
                  (* Swap failed - restore old if we moved it *)
                  if has_existing && Sys.file_exists old_dir then begin
                    try Unix.rename old_dir final_pkg_dir with _ -> ()
                  end;
                  Some (Odoc_gen.doc_result_to_yojson (Odoc_gen.Doc_failure "Failed to atomically swap docs"))
                end
              end else begin
                (* No output produced - could be compile-only phase *)
                Some (Odoc_gen.doc_result_to_yojson (Odoc_gen.Doc_success { html_path = final_html_output_dir; blessed }))
              end
            end else begin
              (* Failure: leave original docs intact (graceful degradation) *)
              let error_msg =
                (* Try to find the log file - could be from any phase *)
                let log_files = ["odoc-voodoo-all.log"; "odoc-voodoo-compile-only.log"; "odoc-voodoo-link-and-gen.log"] in
                try
                  List.find_map (fun name ->
                    let log_path = Path.(doc_layer_dir / name) in
                    if Sys.file_exists log_path then Some (Os.read_from_file log_path) else None
                  ) log_files |> Option.value ~default:(Printf.sprintf "odoc_driver_voodoo exited with status %d" voodoo_status)
                with _ -> Printf.sprintf "odoc_driver_voodoo exited with status %d" voodoo_status
              in
              Os.log "graceful degradation: keeping old docs for %s/%s (build failed)" pkg_name pkg_version;
              Some (Odoc_gen.doc_result_to_yojson (Odoc_gen.Doc_failure error_msg))
            end
          in
          (* Clean up staging directory *)
          Os.sudo_rm_rf staging_html_dir;
          result
      end
    | None, _ ->
      (* Driver layer failed - this will be logged once at the layer level, not per-package *)
      Some (Odoc_gen.doc_result_to_yojson Odoc_gen.Doc_skipped)
    | _, None ->
      (* Odoc layer failed - this will be logged once at the layer level, not per-package *)
      Some (Odoc_gen.doc_result_to_yojson Odoc_gen.Doc_skipped)
+1712
day10/bin/main.ml
(* Instantiate the 0install-based opam solver over the directory-backed
   package context, and alias its nested modules for brevity below. *)
module Solver = Opam_0install.Solver.Make (Dir_context)
module Input = Solver.Input
module Output = Solver.Solver.Output
module Role = Solver.Input.Role
module Role_map = Output.RoleMap
(* Select the container backend for the host OS once at startup; Dummy is
   the fallback for unsupported platforms.
   NOTE(review): the Windows branch matches the string "win32" - confirm
   that is what OpamSysPoll.os reports on Windows. *)
let container =
  match OpamSysPoll.os OpamVariable.Map.empty with
  | Some "linux" -> (module Linux : S.CONTAINER)
  | Some "freebsd" -> (module Freebsd : S.CONTAINER)
  | Some "win32" -> (module Windows : S.CONTAINER)
  | _ -> (module Dummy : S.CONTAINER)

(* Unpack the first-class module once; all later code goes through Container. *)
module Container = (val container)
(** Initialise the per-OS cache: create [config.dir/<os_key>] and, if the
    "base" layer is missing, build it exclusively (so concurrent workers do
    not race) by bootstrapping an opam repository in a temp dir, running the
    container setup, and renaming the temp dir into place. *)
let init t =
  let config = Container.config ~t in
  let os_dir = Path.(config.dir / Config.os_key ~config) in
  let () = Os.mkdir ~parents:true os_dir in
  let root = Path.(os_dir / "base") in
  if not (Sys.file_exists root) then
    Os.create_directory_exclusively root @@ fun ~set_temp_log_path:_ target_dir ->
    let temp_dir = Os.temp_dir ~perms:0o755 ~parent_dir:config.dir "temp-" "" in
    let opam_repository = Util.create_opam_repository temp_dir in
    let build_log = Path.(temp_dir / "build.log") in
    (* Exit status of the base-layer build is ignored here; the rename
       below publishes whatever was produced. *)
    let _ = Container.run ~t ~temp_dir opam_repository build_log in
    Unix.rename temp_dir target_dir
(* Global opam library initialisation, performed at module load time. *)
let () = OpamFormatConfig.init ()

(* let root = OpamStateConfig.opamroot ()
let _ = OpamStateConfig.load_defaults root *)
(* NOTE(review): debug level 10 with a "foo" debug section looks like a
   leftover from a debugging session - confirm this verbosity is intended
   for normal runs. *)
let () = OpamCoreConfig.init ?debug_level:(Some 10) ?debug_sections:(Some (OpamStd.String.Map.singleton "foo" (Some 10))) ()
(** Variable environment used when filtering opam dependency formulas for
    [pkg].  Resolves the standard filter variables; anything unrecognised
    falls back to [Config.std_env]. *)
let opam_env ~(config : Config.t) pkg v =
  (* if List.mem v OpamPackageVar.predefined_depends_variables then (Some (OpamTypes.B true))
  else *)
  let name = OpamVariable.Full.to_string v in
  if String.equal name "version" then
    Some (OpamTypes.S (OpamPackage.Version.to_string (OpamPackage.version pkg)))
  else if String.equal name "with-test" then begin
    (* Tests are only enabled for the package under test itself. *)
    let is_target = String.equal (OpamPackage.to_string pkg) config.package in
    Some (OpamTypes.B (config.with_test && is_target))
  end
  else if List.mem name [ "with-dev"; "with-dev-setup"; "dev"; "with-doc" ] then
    Some (OpamTypes.B false)
  else if String.equal name "build" then
    Some (OpamTypes.B true)
  else if String.equal name "post" then
    (* Leave "post" unresolved so post-dependency filters stay partial. *)
    None
  else
    Config.std_env ~config name
(** Solve the dependency problem for [pkg].
    On success returns [Ok solution] mapping every package reachable from
    [pkg] (and, when [config.with_doc] is set, from its x-extra-doc-deps)
    to its dependency set; on failure returns [Error diagnostics]. *)
let solve (config : Config.t) pkg =
  (* Build constraints: always pin target package version, optionally pin OCaml version *)
  let pkg_constraint = (OpamPackage.name pkg, (`Eq, OpamPackage.version pkg)) in
  let constraints =
    match config.ocaml_version with
    | Some ocaml_ver ->
      OpamPackage.Name.Map.of_list
        [ (OpamPackage.name ocaml_ver, (`Eq, OpamPackage.version ocaml_ver)); pkg_constraint ]
    | None ->
      (* When no OCaml version specified, constrain ocaml-base-compiler to >= 4.08 for better doc-tools compatibility *)
      let ocaml_constraint = (OpamPackage.Name.of_string "ocaml-base-compiler", (`Geq, OpamPackage.Version.of_string "4.08.0")) in
      OpamPackage.Name.Map.of_list [ ocaml_constraint; pkg_constraint ]
  in
  (* Optional local pin: when config.directory is set, pin config.package to
     the "dev" version read from <directory>/<package>.opam. *)
  let pins =
    Option.fold ~none:OpamPackage.Name.Map.empty
      ~some:(fun directory ->
        OpamPackage.Name.Map.empty
        |> OpamPackage.Name.Map.add (OpamPackage.Name.of_string config.package)
          (OpamPackage.Version.of_string "dev", OpamFile.OPAM.read (OpamFile.make (OpamFilename.raw Path.((directory / config.package) ^ ".opam")))))
      config.directory
  in
  let test = if config.with_test then OpamPackage.Name.Set.singleton (OpamPackage.name pkg) else OpamPackage.Name.Set.empty in
  let context =
    Dir_context.create ~env:(Config.std_env ~config) ~constraints ~pins ~test
      (List.map (fun opam_repository -> Path.(opam_repository / "packages")) config.opam_repositories)
  in
  (* Roots to solve: always include ocaml compiler and target package *)
  let ocaml_name = match config.ocaml_version with
    | Some v -> OpamPackage.name v
    | None -> OpamPackage.Name.of_string "ocaml-base-compiler"
  in
  (* Add x-extra-doc-deps packages as solver roots when with_doc is enabled.
     This ensures they're included in the solution so their docs are available
     for cross-package linking during the doc generation phase. *)
  let base_roots = [ ocaml_name; OpamPackage.name pkg ] in
  let roots =
    if config.with_doc then begin
      match Util.opam_file config.opam_repositories pkg with
      | None -> base_roots
      | Some opamfile ->
        let extra_doc_deps = Odoc_gen.get_extra_doc_deps opamfile in
        if OpamPackage.Name.Set.is_empty extra_doc_deps then
          base_roots
        else begin
          let new_roots = OpamPackage.Name.Set.fold (fun name acc -> name :: acc) extra_doc_deps base_roots in
          Os.log "solve: adding x-extra-doc-deps to roots: %s" (OpamPackage.Name.Set.to_string extra_doc_deps);
          new_roots
        end
    end else
      base_roots
  in
  let r = Solver.solve context roots in
  match r with
  | Ok out ->
    let sels = Output.to_map out in
    (* Multimap: role -> its (non-restricting) dependency roles. *)
    let depends = Hashtbl.create 100 in
    (* A role either corresponds to a real opam package name or is a
       virtual solver role. *)
    let classify x =
      match Solver.package_name x with
      | Some pkg -> `Opam pkg
      | None -> `Virtual x
    in
    let () =
      Role_map.iter
        (fun role sel ->
          let impl = Output.unwrap sel in
          Solver.Input.requires role impl |> fst
          |> List.iter (fun dep ->
              let dep = Input.dep_info dep in
              let dep_role = dep.dep_role in
              (* `Restricts edges only constrain versions; they are not
                 installation dependencies, so skip them. *)
              if dep.dep_importance <> `Restricts then Hashtbl.add depends (classify role) (classify dep_role)))
        sels
    in
    (* Flatten virtual roles so only opam package names remain. *)
    let rec expand role =
      Hashtbl.find_all depends role
      |> List.concat_map (function
          | `Opam dep -> [ dep ]
          | `Virtual _ as role -> expand role)
    in
    let pkgs = Solver.packages_of_result out |> OpamPackage.Set.of_list in
    let pkgnames = OpamPackage.names_of_packages pkgs in
    (* Per-package dependency sets: solver edges plus selected depopts,
       minus post-only dependencies (deps that appear only with post:true). *)
    let deptree =
      OpamPackage.Set.fold
        (fun pkg acc ->
          let opam = Dir_context.load context pkg in
          let deps = OpamFile.OPAM.depends opam |> OpamFilter.partial_filter_formula (opam_env ~config pkg) in
          let with_post = OpamFilter.filter_deps ~build:true ~post:true deps |> OpamFormula.all_names in
          let without_post = OpamFilter.filter_deps ~build:true ~post:false deps |> OpamFormula.all_names in
          let deppost = OpamPackage.Name.Set.diff with_post without_post in
          let depopts = OpamFile.OPAM.depopts opam |> OpamFormula.all_names in
          let depopts = OpamPackage.Name.Set.inter depopts pkgnames |> OpamPackage.Name.Set.to_list in
          let name = OpamPackage.name pkg in
          let deps =
            expand (`Opam name) @ depopts |> OpamPackage.Name.Set.of_list |> fun x ->
            OpamPackage.Name.Set.diff x deppost |> OpamPackage.packages_of_names pkgs
          in
          OpamPackage.Map.add pkg deps acc)
        pkgs OpamPackage.Map.empty
    in
    (* Collect the subgraph of deptree reachable from a package. *)
    let rec dfs map pkg =
      let deps = OpamPackage.Map.find pkg deptree in
      OpamPackage.Set.fold
        (fun p acc ->
          match OpamPackage.Map.mem p acc with
          | true -> acc
          | false -> dfs acc p)
        deps (OpamPackage.Map.add pkg deps map)
    in
    (* Start DFS from target package *)
    let solution = dfs OpamPackage.Map.empty pkg in
    (* Also include x-extra-doc-deps packages and their dependencies when with_doc is enabled *)
    let solution =
      if config.with_doc then
        match Util.opam_file config.opam_repositories pkg with
        | None -> solution
        | Some opamfile ->
          let extra_doc_deps = Odoc_gen.get_extra_doc_deps opamfile in
          OpamPackage.Name.Set.fold (fun name sol ->
            (* Find the package version for this name in pkgs *)
            match OpamPackage.Set.find_opt (fun p -> OpamPackage.name p = name) pkgs with
            | None -> sol (* Extra doc dep not in solver result *)
            | Some extra_pkg ->
              if OpamPackage.Map.mem extra_pkg sol then sol
              else dfs sol extra_pkg
          ) extra_doc_deps solution
      else
        solution
    in
    Ok solution
  | Error problem -> Error (Solver.diagnostics problem)
(** Get the extra link deps for a package: the union of its post deps and
    x-extra-doc-deps.  These are package names needed for linking but not
    for compiling. *)
let get_extra_link_deps opamfile =
  OpamPackage.Name.Set.union
    (Odoc_gen.get_post_deps opamfile)
    (Odoc_gen.get_extra_doc_deps opamfile)
(** Extract the OCaml compiler package from a solution, preferring
    ocaml-base-compiler, then ocaml-variants, then ocaml.
    Returns [None] when no compiler package is present. *)
let extract_ocaml_version solution =
  let installed = List.map fst (OpamPackage.Map.bindings solution) in
  let find candidate =
    List.find_opt
      (fun p -> String.equal (OpamPackage.name_to_string p) candidate)
      installed
  in
  match find "ocaml-base-compiler" with
  | Some _ as hit -> hit
  | None ->
    (match find "ocaml-variants" with
     | Some _ as hit -> hit
     | None -> find "ocaml")
(** Order packages so every package appears after its dependencies.
    [pkgs] maps each package to the set of its not-yet-installed
    dependencies.  Each round installs every package whose set is empty;
    the assertion guarantees progress, so a dependency cycle fails fast. *)
let rec topological_sort pkgs =
  if OpamPackage.Map.is_empty pkgs then []
  else begin
    (* Packages with no remaining dependencies can be installed now. *)
    let ready, blocked =
      OpamPackage.Map.partition (fun _ deps -> OpamPackage.Set.is_empty deps) pkgs
    in
    assert (not (OpamPackage.Map.is_empty ready));
    let ready_list = List.map fst (OpamPackage.Map.to_list ready) in
    (* Remove the now-installed packages from every remaining dep set. *)
    let blocked =
      OpamPackage.Map.map
        (fun deps ->
          List.fold_left (fun acc p -> OpamPackage.Set.remove p acc) deps ready_list)
        blocked
    in
    ready_list @ topological_sort blocked
  end
(** Compute each package's transitive dependency set.
    Note: [pkg_deps solution] is partially applied - the resulting function
    still expects the package list to fold over.  For each package it unions
    the direct deps with the already-computed sets of those deps, so the
    list presumably must be in dependency order (e.g. from
    [topological_sort]) or the [Map.find] on [map] would raise - TODO
    confirm with callers. *)
let pkg_deps solution =
  List.fold_left
    (fun map pkg ->
      let deps_direct = OpamPackage.Map.find pkg solution in
      let deps_plus_children = OpamPackage.Set.fold (fun pkg acc -> OpamPackage.Set.union acc (OpamPackage.Map.find pkg map)) deps_direct deps_direct in
      OpamPackage.Map.add pkg deps_plus_children map)
    OpamPackage.Map.empty
220220+let reduce dependencies =
221221+ OpamPackage.Map.map (fun u ->
222222+ OpamPackage.Set.filter
223223+ (fun v ->
224224+ let others = OpamPackage.Set.remove v u in
225225+ OpamPackage.Set.fold (fun o acc -> acc || OpamPackage.Set.mem v (OpamPackage.Map.find o dependencies)) others false |> not)
226226+ u)
227227+*)
(** Restrict [dag] to the subgraph reachable from [root].
    Worklist traversal: packages absent from [dag] are silently skipped,
    and each package is visited at most once. *)
let extract_dag dag root =
  let rec walk seen pending acc =
    match pending with
    | [] -> acc
    | pkg :: pending ->
      if OpamPackage.Set.mem pkg seen then
        (* Already handled; move on. *)
        walk seen pending acc
      else begin
        let seen = OpamPackage.Set.add pkg seen in
        match OpamPackage.Map.find_opt pkg dag with
        | None ->
          (* Not present in the original map; drop it. *)
          walk seen pending acc
        | Some deps ->
          (* Keep this node and queue all of its dependencies. *)
          let acc = OpamPackage.Map.add pkg deps acc in
          let pending = OpamPackage.Set.fold (fun dep rest -> dep :: rest) deps pending in
          walk seen pending acc
      end
  in
  walk OpamPackage.Set.empty [ root ] OpamPackage.Map.empty
(** Outcome of processing one package. *)
type build_result =
  | Solution of OpamTypes.package_set OpamTypes.package_map  (* solved dependency map *)
  | No_solution of string   (* solver failed; carries diagnostics text *)
  | Dependency_failed       (* skipped because a dependency's build failed *)
  | Failure of string       (* build failed; carries the build layer name *)
  | Success of string       (* build succeeded; carries the build layer name *)
(** Short machine-readable tag for a [build_result]. *)
let build_result_to_string r =
  match r with
  | Success _ -> "success"
  | Failure _ -> "failure"
  | Dependency_failed -> "dependency_failed"
  | No_solution _ -> "no_solution"
  | Solution _ -> "solution"
(* Currently a no-op for every case.  The explicit constructor list keeps
   the match exhaustive, so adding a new [build_result] variant forces an
   update here.  NOTE(review): presumably a placeholder for future
   reporting - confirm this is intentional. *)
let print_build_result = function
  | Solution _
  | No_solution _
  | Dependency_failed
  | Failure _
  | Success _ -> ()
(** Build the opam package [pkg] into its content-addressed build layer.
    [ordered_deps]/[ordered_build_hashes] are the package's dependencies
    and their layer hashes, in install order.  Safe under parallel workers:
    the layer is created exclusively, layer.json acts as the completion
    marker, and other workers poll for it.  Returns [Success layer_name]
    or [Failure layer_name] based on the recorded exit status. *)
let build_layer t pkg build_layer_name ordered_deps ordered_build_hashes =
  let pkg_str = OpamPackage.to_string pkg in
  let pkg_name = OpamPackage.name_to_string pkg in
  let pkg_version = OpamPackage.version_to_string pkg in
  Os.log "build_layer: starting %s (hash=%s)" pkg_str build_layer_name;
  let config = Container.config ~t in
  let layer_dir = Path.(config.dir / Config.os_key ~config / build_layer_name) in
  let layer_json = Path.(layer_dir / "layer.json") in
  (* Performs the actual build into a temp dir, then publishes it. *)
  let write_layer ~set_temp_log_path target_dir =
    let temp_dir = Os.temp_dir ~perms:0o755 ~parent_dir:config.dir "temp-" "" in
    let build_log = Path.(temp_dir / "build.log") in
    set_temp_log_path build_log;
    let opam_repo = Util.create_opam_repository temp_dir in
    (* Populate a minimal opam repository containing only pkg and its deps:
       each package's opam file plus any extra files/ directory. *)
    let () =
      List.iter
        (fun pkg ->
          let opam_relative_path = Path.("packages" / OpamPackage.name_to_string pkg / OpamPackage.to_string pkg) in
          List.find_map
            (fun opam_repository ->
              let opam = Path.(opam_repository / opam_relative_path) in
              if Sys.file_exists opam then Some opam else None)
            config.opam_repositories
          |> Option.iter (fun src ->
              let dst = Path.(opam_repo / opam_relative_path) in
              let () = Os.mkdir ~parents:true dst in
              let () = Os.cp Path.(src / "opam") Path.(dst / "opam") in
              let src_files = Path.(src / "files") in
              if Sys.file_exists src_files then
                let dst_files = Path.(dst / "files") in
                let () = Os.mkdir dst_files in
                Sys.readdir src_files |> Array.iter (fun f -> Os.cp Path.(src_files / f) Path.(dst_files / f))))
        (pkg :: ordered_deps)
    in
    let r = Container.build ~t ~temp_dir build_log pkg ordered_build_hashes in
    let () = Os.safe_rename_dir ~marker_file:layer_json temp_dir target_dir in
    (* Scan for files installed by this package (the upperdir contains only new files) *)
    let installed_libs = Util.scan_installed_lib_files ~layer_dir:target_dir in
    let installed_docs = Util.scan_installed_doc_files ~layer_dir:target_dir in
    Util.save_layer_info ~installed_libs ~installed_docs layer_json pkg ordered_deps ordered_build_hashes r;
    (* Create symlink from packages/{pkg} -> ../build-{hash} for easy lookup by package name *)
    Util.ensure_package_layer_symlink ~cache_dir:config.dir ~os_key:(Config.os_key ~config) ~pkg_str ~layer_name:build_layer_name
  in
  (* Wraps write_layer so a crash still records a failed layer.json. *)
  let safe_write_layer ~set_temp_log_path target_dir =
    try
      write_layer ~set_temp_log_path target_dir
    with exn ->
      (* Ensure layer.json is created even on failure, so other workers don't wait forever *)
      Os.log "build_layer: FAILED %s - %s" pkg_str (Printexc.to_string exn);
      if not (Sys.file_exists target_dir) then Os.mkdir target_dir;
      let target_layer_json = Path.(target_dir / "layer.json") in
      if not (Sys.file_exists target_layer_json) then
        Util.save_layer_info target_layer_json pkg ordered_deps ordered_build_hashes 1;
      (* Create symlink even for failures so we can look up the failure status *)
      Util.ensure_package_layer_symlink ~cache_dir:config.dir ~os_key:(Config.os_key ~config) ~pkg_str ~layer_name:build_layer_name;
      raise exn
  in
  (* Check layer.json exists, not just the directory - directory might exist from interrupted build *)
  let universe = Odoc_gen.compute_universe_hash ordered_build_hashes in
  let lock_info = Os.{ cache_dir = config.dir; stage = `Build; package = pkg_name; version = pkg_version; universe = Some universe; layer_name = Some build_layer_name } in
  let () =
    if not (Sys.file_exists layer_json) then
      Os.create_directory_exclusively ~marker_file:layer_json ~lock_info layer_dir safe_write_layer
  in
  let () = if config.log then Os.read_from_file Path.(layer_dir / "build.log") |> print_endline in
  (* Wait for layer.json to exist (might be created by another parallel worker) *)
  let rec wait_for_layer_json retries =
    if Sys.file_exists layer_json then ()
    else if retries <= 0 then
      failwith (Printf.sprintf "Build layer %s never completed (layer.json missing)" build_layer_name)
    else begin
      Unix.sleepf 0.5;
      wait_for_layer_json (retries - 1)
    end
  in
  let () = wait_for_layer_json 600 in (* Wait up to 5 minutes *)
  (* Zero the marker's mtime; presumably used by cache aging/GC - TODO confirm. *)
  let () = Unix.utimes layer_json 0.0 0.0 in
  (* Ensure symlink exists even if layer was pre-existing from previous run *)
  Util.ensure_package_layer_symlink ~cache_dir:config.dir ~os_key:(Config.os_key ~config) ~pkg_str ~layer_name:build_layer_name;
  let exit_status = Util.load_layer_info_exit_status layer_json in
  match exit_status with
  | 0 -> Success build_layer_name
  | _ -> Failure build_layer_name
(** Build a doc layer for a package.
    Reads installed files from the build layer, runs doc generation,
    and saves doc layer info.
    Returns [Some doc_layer_name] on success, [None] on failure.

    Concurrency protocol: the layer directory is created under an exclusive
    lock ([Os.create_directory_exclusively]); [layer.json] acts as the
    completion marker that other parallel workers poll for. *)
let doc_layer t pkg build_layer_name dep_doc_hashes ~ocaml_version =
  match ocaml_version with
  | None -> None (* No OCaml version means no docs (e.g., conf-* packages) *)
  | Some ocaml_version ->
    let pkg_str = OpamPackage.to_string pkg in
    let pkg_name = OpamPackage.name_to_string pkg in
    let pkg_version = OpamPackage.version_to_string pkg in
    Os.log "doc_layer: starting %s (build=%s, ocaml=%s)" pkg_str build_layer_name (OpamPackage.to_string ocaml_version);
    let config = Container.config ~t in
    let os_key = Config.os_key ~config in
    (* "Blessed" = this version is the canonical one for its package name;
       controls extra symlink creation below. *)
    let blessed = match config.blessed_map with
      | Some map -> Blessing.is_blessed map pkg
      | None -> false
    in
    (* The doc hash keys the layer cache: it covers the build layer, the doc
       hashes of the deps, the compiler, and the blessed flag. *)
    let doc_hash = Container.doc_layer_hash ~t ~build_hash:build_layer_name ~dep_doc_hashes ~ocaml_version ~blessed in
    let doc_layer_name = "doc-" ^ doc_hash in
    let doc_layer_dir = Path.(config.dir / os_key / doc_layer_name) in
    let build_layer_dir = Path.(config.dir / os_key / build_layer_name) in
    let doc_layer_json = Path.(doc_layer_dir / "layer.json") in
    (* Compute universe for lock file (same computation as in linux.ml) *)
    let universe = Odoc_gen.compute_universe_hash dep_doc_hashes in
    let write_layer ~set_temp_log_path target_dir =
      (* For doc layers, the log is written to target_dir/odoc-voodoo-all.log *)
      set_temp_log_path (Path.(target_dir / "odoc-voodoo-all.log"));
      (* Read installed files from build layer *)
      let build_layer_json = Path.(build_layer_dir / "layer.json") in
      let installed_libs = Util.load_layer_info_installed_libs build_layer_json in
      let installed_docs = Util.load_layer_info_installed_docs build_layer_json in
      (* Determine doc phase based on extra link deps (post deps + x-extra-doc-deps):
         packages with extra link deps are only compiled now and linked later
         in a deferred pass, once those deps' docs exist. *)
      let opamfile = Util.opam_file config.opam_repositories pkg in
      let phase = match opamfile with
        | None -> S.Doc_all
        | Some opam ->
          let extra_link_deps = get_extra_link_deps opam in
          if OpamPackage.Name.Set.is_empty extra_link_deps then S.Doc_all
          else S.Doc_compile_only
      in
      let doc_result =
        Container.generate_docs ~t ~build_layer_dir ~doc_layer_dir:target_dir ~dep_doc_hashes ~pkg ~installed_libs ~installed_docs ~phase ~ocaml_version
      in
      Util.save_doc_layer_info ?doc_result (Path.(target_dir / "layer.json")) pkg ~build_hash:build_layer_name ~dep_doc_hashes
    in
    let safe_write_layer ~set_temp_log_path target_dir =
      (* Create directory first so we can write failure marker if needed *)
      if not (Sys.file_exists target_dir) then Os.mkdir target_dir;
      try
        write_layer ~set_temp_log_path target_dir
      with exn ->
        (* Ensure layer.json is created even on failure, so other workers don't wait forever *)
        let error_msg = Printf.sprintf "Exception during doc generation: %s" (Printexc.to_string exn) in
        Os.log "doc_layer: FAILED %s - %s" pkg_str error_msg;
        let target_layer_json = Path.(target_dir / "layer.json") in
        if not (Sys.file_exists target_layer_json) then
          Util.save_doc_layer_info ~doc_result:(Odoc_gen.doc_result_to_yojson (Odoc_gen.Doc_failure error_msg)) target_layer_json pkg ~build_hash:build_layer_name ~dep_doc_hashes;
        raise exn
    in
    let lock_info = Os.{ cache_dir = config.dir; stage = `Doc; package = pkg_name; version = pkg_version; universe = Some universe; layer_name = Some doc_layer_name } in
    let () =
      if not (Sys.file_exists doc_layer_json) then
        Os.create_directory_exclusively ~marker_file:doc_layer_json ~lock_info doc_layer_dir safe_write_layer
    in
    (* Wait for layer.json to exist (might be created by another parallel worker) *)
    let rec wait_for_layer_json retries =
      if Sys.file_exists doc_layer_json then ()
      else if retries <= 0 then
        failwith (Printf.sprintf "Doc layer %s never completed (layer.json missing)" doc_layer_name)
      else begin
        Unix.sleepf 0.5;
        wait_for_layer_json (retries - 1)
      end
    in
    let () = wait_for_layer_json 600 in (* 600 retries x 0.5s = wait up to 5 minutes *)
    (* Reset the marker's timestamps to the epoch.
       NOTE(review): presumably so mtime-based scans/GC treat completed layers
       uniformly — confirm against the cache GC implementation. *)
    let () = Unix.utimes doc_layer_json 0.0 0.0 in
    (* Check if doc generation failed *)
    if Util.load_layer_info_doc_failed doc_layer_json then
      None
    else begin
      (* Create symlink for this doc layer *)
      Util.ensure_package_layer_symlink ~cache_dir:config.dir ~os_key ~pkg_str ~layer_name:doc_layer_name;
      (* If blessed, create blessed-build and blessed-docs symlinks *)
      if blessed then begin
        Util.ensure_package_blessed_symlink ~cache_dir:config.dir ~os_key ~pkg_str ~kind:`Build ~layer_name:build_layer_name;
        Util.ensure_package_blessed_symlink ~cache_dir:config.dir ~os_key ~pkg_str ~kind:`Docs ~layer_name:doc_layer_name
      end;
      Some doc_layer_name
    end
(** Build a jtw layer for a package.
    Compiles .cma to .cma.js, copies .cmi and META, generates dynamic_cmis.json.
    Returns [Some jtw_layer_name] on success, [None] on failure.

    Uses the same lock-and-marker protocol as [doc_layer]: the layer directory
    is created under an exclusive lock and [layer.json] signals completion to
    other workers. *)
let jtw_layer t pkg build_layer_name dep_build_hashes ~ocaml_version =
  match ocaml_version with
  | None -> None (* No OCaml version means no jtw *)
  | Some ocaml_version ->
    let pkg_str = OpamPackage.to_string pkg in
    let pkg_name = OpamPackage.name_to_string pkg in
    let pkg_version = OpamPackage.version_to_string pkg in
    Os.log "jtw_layer: starting %s (build=%s, ocaml=%s)" pkg_str build_layer_name (OpamPackage.to_string ocaml_version);
    let config = Container.config ~t in
    let os_key = Config.os_key ~config in
    (* Layer cache key: build layer + compiler version. *)
    let jtw_hash = Container.jtw_layer_hash ~t ~build_hash:build_layer_name ~ocaml_version in
    let jtw_layer_name = "jtw-" ^ jtw_hash in
    let jtw_layer_dir = Path.(config.dir / os_key / jtw_layer_name) in
    let build_layer_dir = Path.(config.dir / os_key / build_layer_name) in
    let jtw_layer_json = Path.(jtw_layer_dir / "layer.json") in
    let universe = Odoc_gen.compute_universe_hash dep_build_hashes in
    let write_layer ~set_temp_log_path target_dir =
      (* jtw logs go to target_dir/jtw.log. *)
      set_temp_log_path (Path.(target_dir / "jtw.log"));
      (* Installed libraries are read from the build layer's marker file. *)
      let build_layer_json = Path.(build_layer_dir / "layer.json") in
      let installed_libs = Util.load_layer_info_installed_libs build_layer_json in
      let jtw_result =
        Container.generate_jtw ~t ~build_layer_dir ~jtw_layer_dir:target_dir ~dep_build_hashes ~pkg ~installed_libs ~ocaml_version
      in
      Jtw_gen.save_jtw_layer_info ?jtw_result (Path.(target_dir / "layer.json")) pkg ~build_hash:build_layer_name
    in
    let safe_write_layer ~set_temp_log_path target_dir =
      (* Create directory first so we can write a failure marker if needed. *)
      if not (Sys.file_exists target_dir) then Os.mkdir target_dir;
      try
        write_layer ~set_temp_log_path target_dir
      with exn ->
        (* Write a failure layer.json so parallel workers don't wait forever. *)
        Os.log "jtw_layer: FAILED %s - %s" pkg_str (Printexc.to_string exn);
        let target_layer_json = Path.(target_dir / "layer.json") in
        if not (Sys.file_exists target_layer_json) then
          Jtw_gen.save_jtw_layer_info ~jtw_result:(Jtw_gen.jtw_result_to_yojson (Jtw_gen.Jtw_failure (Printexc.to_string exn))) target_layer_json pkg ~build_hash:build_layer_name;
        raise exn
    in
    (* NOTE(review): lock stage is `Build here while doc_layer uses `Doc; the
       lock is still unique via layer_name, but confirm `Build is intended for
       jtw rather than a dedicated stage. *)
    let lock_info = Os.{ cache_dir = config.dir; stage = `Build; package = pkg_name; version = pkg_version; universe = Some universe; layer_name = Some jtw_layer_name } in
    let () =
      if not (Sys.file_exists jtw_layer_json) then
        Os.create_directory_exclusively ~marker_file:jtw_layer_json ~lock_info jtw_layer_dir safe_write_layer
    in
    (* Wait for layer.json *)
    let rec wait_for_layer_json retries =
      if Sys.file_exists jtw_layer_json then ()
      else if retries <= 0 then
        failwith (Printf.sprintf "JTW layer %s never completed (layer.json missing)" jtw_layer_name)
      else begin
        Unix.sleepf 0.5;
        wait_for_layer_json (retries - 1)
      end
    in
    let () = wait_for_layer_json 600 in
    let () = Unix.utimes jtw_layer_json 0.0 0.0 in
    (* Create symlink *)
    Util.ensure_package_layer_symlink ~cache_dir:config.dir ~os_key ~pkg_str ~layer_name:jtw_layer_name;
    (* NOTE(review): unlike doc_layer, the saved jtw result is not inspected
       here, so Some is returned even when generation recorded a failure —
       confirm callers only use this as "layer exists". *)
    Some jtw_layer_name
(** Solve, then build every package of the solution in dependency order.
    For each package: build layer, then (optionally) doc and jtw layers.
    Returns the per-package results followed by [Solution]/[No_solution].

    Fix: the dry-run completeness check now tests for [layer.json] rather than
    the layer directory — a directory can be left behind by an interrupted
    build, in which case reading the exit status from a missing layer.json is
    wrong (this matches the "check layer.json, not just the directory"
    convention used everywhere else in this file). *)
let build config package =
  match solve config package with
  | Ok solution ->
    let () = if config.log then Dot_solution.to_string solution |> print_endline in
    let () = Option.iter (fun filename -> Dot_solution.save filename solution) config.dot in
    let t = Container.init ~config in
    init t;
    let ordered_installation = topological_sort solution in
    let dependencies = pkg_deps solution ordered_installation in
    (* Extract OCaml version from solution - will be used for doc tools *)
    let ocaml_version = extract_ocaml_version solution in
    (* Dry-run: succeed without building only if every layer already exists
       and the whole prefix of the install order succeeded. *)
    let all_layers_exist =
      if config.dry_run then
        let rec check_all prev_success = function
          | [] -> true
          | pkg :: rest ->
            let ordered_deps = extract_dag dependencies pkg |> topological_sort |> List.rev |> List.tl in
            let hash = Container.layer_hash ~t (pkg :: ordered_deps) in
            let build_layer_name = "build-" ^ hash in
            let layer_dir = Path.(config.dir / Config.os_key ~config / build_layer_name) in
            let layer_json = Path.(layer_dir / "layer.json") in
            (* Check layer.json, not just the directory: a directory without
               the marker is an interrupted (incomplete) build. *)
            let layer_exists = Sys.file_exists layer_json in
            if layer_exists then
              let exit_status = Util.load_layer_info_exit_status layer_json in
              check_all (prev_success && exit_status = 0) rest
            else if prev_success then false
            else check_all false rest
        in
        check_all true ordered_installation
      else false
    in
    if config.dry_run && not all_layers_exist then (
      Container.deinit ~t;
      [ Solution solution ]
    )
    else
      (* Track packages that need deferred doc linking (have post deps) *)
      let deferred_doc_link = ref [] in
      (* Three accumulators: results, build_map (pkg -> build_result), doc_map (pkg -> doc_layer_name) *)
      let results, _, doc_map =
        List.fold_left
          (fun (res, bm, dm) pkg ->
            let ordered_deps = extract_dag dependencies pkg |> topological_sort |> List.rev |> List.tl in
            (* Collect the build hashes of deps that were attempted (success or failure). *)
            let ordered_build_hashes =
              List.filter_map
                (fun p ->
                  match OpamPackage.Map.find p bm with
                  | Success h
                  | Failure h ->
                    Some h
                  | _ -> None)
                ordered_deps
            in
            let hash = Container.layer_hash ~t (pkg :: ordered_deps) in
            let build_layer_name = "build-" ^ hash in
            let do_build () =
              let r = build_layer t pkg build_layer_name ordered_deps ordered_build_hashes in
              (* If build succeeded and with_doc, create doc layer *)
              let r, dm =
                if config.with_doc then
                  match r with
                  | Success _ ->
                    let dep_doc_hashes = List.filter_map (fun p -> OpamPackage.Map.find_opt p dm) ordered_deps in
                    (match doc_layer t pkg build_layer_name dep_doc_hashes ~ocaml_version with
                     | Some doc_name ->
                       (* Track packages with extra link deps (post deps + x-extra-doc-deps) for deferred doc linking *)
                       let opamfile = Util.opam_file config.opam_repositories pkg in
                       (match opamfile with
                        | Some opam when not (OpamPackage.Name.Set.is_empty (get_extra_link_deps opam)) ->
                          deferred_doc_link := (pkg, build_layer_name, doc_name) :: !deferred_doc_link
                        | _ -> ());
                       (r, OpamPackage.Map.add pkg doc_name dm)
                     | None ->
                       (* Doc generation failed - treat as failure when --with-doc *)
                       (Failure build_layer_name, dm))
                  | _ -> (r, dm)
                else (r, dm)
              in
              (* If build succeeded and with_jtw, create jtw layer *)
              let () =
                if config.with_jtw then
                  match r with
                  | Success _ ->
                    ignore (jtw_layer t pkg build_layer_name ordered_build_hashes ~ocaml_version)
                  | _ -> ()
              in
              (r, dm)
            in
            (* Only keep building while the previous package succeeded;
               otherwise cascade Dependency_failed without running anything. *)
            match res with
            | [] ->
              let r, dm = do_build () in
              ([ r ], OpamPackage.Map.add pkg r bm, dm)
            | Success _ :: _ ->
              let r, dm = do_build () in
              (r :: res, OpamPackage.Map.add pkg r bm, dm)
            | _ ->
              (Dependency_failed :: res, OpamPackage.Map.add pkg Dependency_failed bm, dm))
          ([], OpamPackage.Map.empty, OpamPackage.Map.empty) ordered_installation
      in
      (* Run deferred doc link phase for packages with extra link deps (post deps + x-extra-doc-deps) *)
      let () =
        if config.with_doc && not (List.is_empty !deferred_doc_link) then begin
          let os_key = Config.os_key ~config in
          (* Build a map of package name to package in solution for looking up x-extra-doc-deps *)
          let solution_by_name =
            OpamPackage.Map.fold (fun pkg _ acc ->
              OpamPackage.Name.Map.add (OpamPackage.name pkg) pkg acc
            ) doc_map OpamPackage.Name.Map.empty
          in
          List.iter (fun (pkg, build_layer_name, doc_layer_name) ->
            let build_layer_dir = Path.(config.dir / os_key / build_layer_name) in
            let doc_layer_dir = Path.(config.dir / os_key / doc_layer_name) in
            (* Get updated dep_doc_hashes including post deps now available *)
            let ordered_deps = extract_dag dependencies pkg |> topological_sort |> List.rev |> List.tl in
            let base_dep_doc_hashes = List.filter_map (fun p -> OpamPackage.Map.find_opt p doc_map) ordered_deps in
            (* Also include x-extra-doc-deps doc hashes *)
            let opamfile = Util.opam_file config.opam_repositories pkg in
            let extra_doc_dep_hashes = match opamfile with
              | None -> []
              | Some opam ->
                let extra_doc_deps = Odoc_gen.get_extra_doc_deps opam in
                if not (OpamPackage.Name.Set.is_empty extra_doc_deps) then
                  Os.log "deferred_doc_link: %s has x-extra-doc-deps: [%s]"
                    (OpamPackage.to_string pkg)
                    (OpamPackage.Name.Set.to_string extra_doc_deps);
                OpamPackage.Name.Set.fold (fun name acc ->
                  match OpamPackage.Name.Map.find_opt name solution_by_name with
                  | Some extra_pkg ->
                    (match OpamPackage.Map.find_opt extra_pkg doc_map with
                     | Some doc_hash ->
                       Os.log "deferred_doc_link: including doc hash for %s -> %s"
                         (OpamPackage.to_string extra_pkg) doc_hash;
                       doc_hash :: acc
                     | None ->
                       Os.log "deferred_doc_link: warning - %s has no doc layer"
                         (OpamPackage.to_string extra_pkg);
                       acc)
                  | None ->
                    Os.log "deferred_doc_link: warning - x-extra-doc-dep %s not in solution"
                      (OpamPackage.Name.to_string name);
                    acc
                ) extra_doc_deps []
            in
            let dep_doc_hashes = base_dep_doc_hashes @ extra_doc_dep_hashes in
            let build_layer_json = Path.(build_layer_dir / "layer.json") in
            let installed_libs = Util.load_layer_info_installed_libs build_layer_json in
            let installed_docs = Util.load_layer_info_installed_docs build_layer_json in
            Option.iter (fun ocaml_version ->
              let _doc_result =
                Container.generate_docs ~t ~build_layer_dir ~doc_layer_dir ~dep_doc_hashes ~pkg ~installed_libs ~installed_docs ~phase:S.Doc_link_only ~ocaml_version
              in
              ()) ocaml_version
          ) !deferred_doc_link
        end
      in
      (* Prune layers if requested: only the target package's layers, and only
         once its HTML docs are known to exist on disk. *)
      let () =
        if config.prune_layers then begin
          let os_key = Config.os_key ~config in
          List.iter (fun (res, pkg) ->
            let is_target = String.equal (OpamPackage.to_string pkg) config.package in
            if is_target then
              match res with
              | Success build_layer_name ->
                let html_exists = match config.html_output with
                  | Some html_output ->
                    let pkg_doc_dir = Path.(html_output / "p" / OpamPackage.name_to_string pkg / OpamPackage.version_to_string pkg) in
                    Sys.file_exists pkg_doc_dir
                  | None -> false
                in
                if html_exists then begin
                  Printf.printf "Pruning build layer for %s\n%!" (OpamPackage.to_string pkg);
                  ignore (Os.sudo ["rm"; "-rf"; Path.(config.dir / os_key / build_layer_name)]);
                  (* Also prune the doc layer if it exists *)
                  (match OpamPackage.Map.find_opt pkg doc_map with
                   | Some doc_name ->
                     Printf.printf "Pruning doc layer for %s\n%!" (OpamPackage.to_string pkg);
                     ignore (Os.sudo ["rm"; "-rf"; Path.(config.dir / os_key / doc_name)])
                   | None -> ())
                end
              | _ -> ()
          ) (List.combine (List.rev results) ordered_installation)
        end
      in
      Container.deinit ~t;
      results @ [ Solution solution ]
  | Error s ->
    let () = if config.log then print_endline s in
    [ No_solution s ]
703703+704704+open Cmdliner
(** Print the set of available packages (one per line, shuffled), optionally
    writing the same list to [config.json]. With [all_versions] every version
    is listed; otherwise only the newest version of each package name. *)
let run_list (config : Config.t) all_versions =
  let () = Random.self_init () in
  (* Accumulate every package whose opam "available" filter evaluates true. *)
  let scan_repository set opam_repository =
    let packages = Path.(opam_repository / "packages") in
    let scan_version name set package =
      if package.[0] = '.' then set
      else begin
        let pkg = OpamPackage.of_string package in
        let opam = Path.(packages / name / package / "opam") |> OpamFilename.raw |> OpamFile.make |> OpamFile.OPAM.read in
        if OpamFilter.eval_to_bool ~default:false (opam_env ~config pkg) (OpamFile.OPAM.available opam) then
          OpamPackage.Set.add pkg set
        else set
      end
    in
    let scan_name set name =
      Filename.concat packages name |> Sys.readdir
      |> Array.fold_left (scan_version name) set
    in
    Array.fold_left scan_name set (Sys.readdir packages)
  in
  let all_packages = List.fold_left scan_repository OpamPackage.Set.empty config.opam_repositories in
  (* Either keep everything, or reduce to the highest version per name. *)
  let packages_to_show =
    if all_versions then all_packages
    else
      OpamPackage.Name.Map.fold
        (fun n vset base -> OpamPackage.Set.add (OpamPackage.create n (OpamPackage.Version.Set.max_elt vset)) base)
        (OpamPackage.to_map all_packages) OpamPackage.Set.empty
  in
  (* Shuffle by sorting on random keys, then render to strings. *)
  let package_list =
    packages_to_show
    |> OpamPackage.Set.to_list_map (fun x -> (Random.bits (), x))
    |> List.sort compare |> List.map snd
    |> List.map OpamPackage.to_string
  in
  List.iter print_endline package_list;
  Option.iter (fun filename -> Json_packages.write_packages filename package_list) config.json
(** Render build [results] to the outputs selected in [config]:
    - [config.md]: a markdown report (front matter + per-layer build logs);
    - [config.json]: a JSON summary (name/status/sha plus first layer's log and the solution);
    - [config.tag]: import the built layer filesystems into Docker under that tag.
    Finally prints the overall result ([List.hd results]) to stdout.
    Precondition: [results] is non-empty (as produced by [build]). *)
let output (config : Config.t) results =
  let os_key = Config.os_key ~config in
  (* Concatenated HEAD SHAs of all configured opam repositories. *)
  let opam_repo_sha () =
    List.map
      (fun opam_repository ->
        let cmd = Printf.sprintf "git -C %s rev-parse HEAD" opam_repository in
        Os.run cmd |> String.trim)
      config.opam_repositories
    |> String.concat ""
  in
  (* Markdown report, if requested. *)
  let () =
    Option.iter
      (fun filename ->
        let oc = open_out_bin filename in
        (* YAML-ish front matter: overall status, repo commit, package name. *)
        let () = Printf.fprintf oc "---\nstatus: %s\ncommit: %s\npackage: %s\n---\n" (build_result_to_string (List.hd results)) (opam_repo_sha ()) config.package in
        let () =
          (* results is accumulated in reverse; restore install order. *)
          List.rev results
          |> List.iter (function
               | Solution solution ->
                 Printf.fprintf oc "\n# Solution\n\n";
                 output_string oc (Dot_solution.to_string solution)
               | Success hash
               | Failure hash ->
                 (* One section per layer, titled by its package, body = build log. *)
                 let package = Util.load_layer_info_package_name Path.(config.dir / os_key / hash / "layer.json") in
                 Printf.fprintf oc "\n# %s\n\n" package;
                 let build_log = Os.read_from_file Path.(config.dir / os_key / hash / "build.log") in
                 output_string oc build_log
               | No_solution log -> output_string oc log
               | _ -> ())
        in
        close_out oc)
      config.md
  in
  (* JSON summary, if requested. *)
  let () =
    Option.iter
      (fun filename ->
        (* First layer result (results is reversed, so this is the last-built layer). *)
        let hash =
          List.find_map
            (function
              | Success hash
              | Failure hash ->
                Some hash
              | _ -> None)
            results
        in
        let solution =
          List.find_map
            (function
              | Solution s -> Some (Dot_solution.to_string s)
              | No_solution s -> Some s
              | _ -> None)
            results
        in
        let j =
          `Assoc
            ([ ("name", `String config.package); ("status", `String (build_result_to_string (List.hd results))); ("sha", `String (opam_repo_sha ())) ]
            @ Option.fold ~none:[]
                ~some:(fun hash ->
                  let build_log = Os.read_from_file Path.(config.dir / os_key / hash / "build.log") in
                  [ ("layer", `String hash); ("log", `String build_log) ])
                hash
            @ Option.fold ~none:[] ~some:(fun s -> [ ("solution", `String s) ]) solution)
        in
        Yojson.Safe.to_file filename j)
      config.json
  in
  (* Docker import, if a tag was requested: hard-link every layer fs into a
     temp dir, overlay the opam repository, then tar | docker import. *)
  let () =
    Option.iter
      (fun tag ->
        let layers =
          List.filter_map
            (function
              | Success hash
              | Failure hash ->
                Some hash
              | _ -> None)
            results
        in
        let () = Printf.printf "Importing layers into Docker with tag: %s\n%!" tag in
        let temp_dir = Os.temp_dir ~perms:0o755 ~parent_dir:config.dir "docker-import-" "" in
        (* cp -n --link: merge via hard links, never overwriting earlier layers. *)
        let cp s d = [ "cp"; "-n"; "--archive"; "--no-dereference"; "--recursive"; "--link"; "--no-target-directory"; s; d ] in
        let () =
          List.iter
            (fun hash ->
              let layer_dir = Path.(config.dir / os_key / hash / "fs") in
              let _ = Os.sudo (cp layer_dir temp_dir) in
              ())
            (layers @ [ "base" ])
        in
        (* Copy the opam repository snapshot from the first layer into the image. *)
        let () =
          match layers with
          | hash :: _ ->
            let opam_repo_src = Path.(config.dir / os_key / hash / "opam-repository") in
            let opam_repo_dst = Path.(temp_dir / "home" / "opam" / ".opam" / "repo" / "default") in
            let _ = Os.sudo (cp opam_repo_src opam_repo_dst) in
            ()
          | _ -> ()
        in
        let () = Os.run (String.concat " " [ "sudo"; "tar"; "-C"; temp_dir; "-c"; "."; "|"; "docker"; "import"; "-"; tag ]) |> print_string in
        let _ = Os.sudo [ "rm"; "-rf"; temp_dir ] in
        ())
      config.tag
  in
  print_build_result (List.hd results)
(** Run a CI build of [config.package] pinned at the [.dev] version and
    emit the results via [output]. *)
let run_ci (config : Config.t) =
  let package = OpamPackage.of_string (config.package ^ ".dev") in
  build config package |> output config
(** Run a health-check build of [config.package] (a full "name.version"
    string) and emit the results via [output]. *)
let run_health_check (config : Config.t) =
  let package = OpamPackage.of_string config.package in
  build config package |> output config
(** Run health checks for one package, or — when [package_arg] starts with
    ['@'] — for every package listed in the file named after the '@'
    (optionally forked across [config.fork] processes).

    Fix: guard the string length before indexing [package_arg.[0]]; the
    original raised [Invalid_argument] on an empty argument. This mirrors the
    guard already used in [run_batch]. *)
let run_health_check_multi (config : Config.t) package_arg =
  if not (String.length package_arg > 0 && package_arg.[0] = '@') then
    (* Single package: use paths as-is (files, not directories) *)
    let config = { config with package = package_arg } in
    run_health_check config
  else begin
    let filename = String.sub package_arg 1 (String.length package_arg - 1) in
    let packages = Json_packages.read_packages filename in
    (* Multiple packages: treat paths as directories *)
    let () = Option.iter (fun path -> Os.mkdir ~parents:true path) config.json in
    let () = Option.iter (fun path -> Os.mkdir ~parents:true path) config.md in
    let () = Option.iter (fun path -> Os.mkdir ~parents:true path) config.dot in
    (* Per-package output files live inside the configured directories. *)
    let run_with_package pkg_name =
      let json = Option.map (fun path -> Path.(path / pkg_name ^ ".json")) config.json in
      let md = Option.map (fun path -> Path.(path / pkg_name ^ ".md")) config.md in
      let dot = Option.map (fun path -> Path.(path / pkg_name ^ ".dot")) config.dot in
      let config = { config with package = pkg_name; json; md; dot } in
      run_health_check config
    in
    match config.fork with
    | Some 1
    | None -> List.iter run_with_package packages
    | Some n -> Os.fork ~np:n run_with_package packages
  end
(** Run deferred doc link pass for packages with x-extra-doc-deps across all built packages.
    This is used in batch mode after all targets are built, to link packages whose
    x-extra-doc-deps were not available during the initial doc generation.

    Fix: the container handle obtained from [Container.init] is now released
    with [Container.deinit] before returning, matching the init/deinit pairing
    used by [build]. Also renamed the inner fold binding that shadowed [pkg]. *)
let run_global_deferred_doc_link (config : Config.t) =
  if not config.with_doc then ()
  else begin
    let os_key = Config.os_key ~config in
    let layer_dir = Path.(config.dir / os_key) in
    let t = Container.init ~config in

    (* Build a map of package name -> (package, doc_layer_dir, doc_hash) for all doc layers *)
    let doc_layers_by_name =
      let layers = ref OpamPackage.Name.Map.empty in
      (try
        Sys.readdir layer_dir |> Array.iter (fun name ->
          if String.length name > 4 && String.sub name 0 4 = "doc-" then begin
            let layer_json = Path.(layer_dir / name / "layer.json") in
            if Sys.file_exists layer_json then
              try
                let json = Yojson.Safe.from_file layer_json in
                let open Yojson.Safe.Util in
                let pkg_str = json |> member "package" |> to_string in
                let pkg = OpamPackage.of_string pkg_str in
                layers := OpamPackage.Name.Map.add (OpamPackage.name pkg) (pkg, Path.(layer_dir / name), name) !layers
              with _ -> ()  (* Ignore unreadable/malformed layer.json files *)
          end
        )
      with _ -> ());
      !layers
    in

    (* Find packages with x-extra-doc-deps that need re-linking *)
    let packages_to_relink =
      OpamPackage.Name.Map.fold (fun _name (pkg, doc_layer_dir, doc_hash) acc ->
        let opamfile = Util.opam_file config.opam_repositories pkg in
        match opamfile with
        | None -> acc
        | Some opam ->
          let extra_doc_deps = Odoc_gen.get_extra_doc_deps opam in
          if OpamPackage.Name.Set.is_empty extra_doc_deps then acc
          else begin
            (* Check if at least one x-extra-doc-dep has a doc layer now.
               We re-link even with partial deps - better to have some cross-links than none. *)
            let any_dep_available = OpamPackage.Name.Set.exists (fun dep_name ->
              OpamPackage.Name.Map.mem dep_name doc_layers_by_name
            ) extra_doc_deps in
            if any_dep_available then begin
              (* Check if link phase was already done (log file exists and is > 1KB) *)
              let link_log = Path.(doc_layer_dir / "odoc-voodoo-link-and-gen.log") in
              let needs_relink =
                if Sys.file_exists link_log then
                  let st = Unix.stat link_log in
                  st.Unix.st_size < 1000 (* Small log suggests failure *)
                else
                  false (* No log means compile-only wasn't run, skip *)
              in
              if needs_relink then
                (pkg, doc_layer_dir, doc_hash, extra_doc_deps) :: acc
              else acc
            end else acc
          end
      ) doc_layers_by_name []
    in

    if packages_to_relink <> [] then begin
      Os.log "global_deferred_doc_link: Re-linking %d packages with x-extra-doc-deps" (List.length packages_to_relink);

      List.iter (fun (pkg, doc_layer_dir, _doc_hash, extra_doc_deps) ->
        Os.log "global_deferred_doc_link: Processing %s (extra deps: %s)"
          (OpamPackage.to_string pkg)
          (OpamPackage.Name.Set.to_string extra_doc_deps);

        (* Find build layer from the doc layer's layer.json *)
        let layer_json = Path.(doc_layer_dir / "layer.json") in
        let json = Yojson.Safe.from_file layer_json in
        let open Yojson.Safe.Util in
        let build_layer_name = json |> member "build_hash" |> to_string in
        let build_layer_dir = Path.(layer_dir / build_layer_name) in
        let build_layer_json = Path.(build_layer_dir / "layer.json") in

        (* Get installed files from build layer *)
        let installed_libs = Util.load_layer_info_installed_libs build_layer_json in
        let installed_docs = Util.load_layer_info_installed_docs build_layer_json in

        (* Get base dep_doc_hashes from original layer.json *)
        let base_dep_doc_hashes = json |> member "dep_doc_hashes" |> to_list |> List.map to_string in

        (* Add x-extra-doc-deps doc hashes *)
        let extra_dep_hashes = OpamPackage.Name.Set.fold (fun dep_name acc ->
          match OpamPackage.Name.Map.find_opt dep_name doc_layers_by_name with
          | Some (_, _, doc_hash) -> doc_hash :: acc
          | None -> acc
        ) extra_doc_deps [] in

        let dep_doc_hashes = base_dep_doc_hashes @ extra_dep_hashes in

        (* Extract OCaml version from solution - look for it in the doc layers
           (renamed the binding so it no longer shadows [pkg] above). *)
        let ocaml_version =
          OpamPackage.Name.Map.fold (fun name (candidate, _, _) acc ->
            match acc with
            | Some _ -> acc
            | None ->
              let name_str = OpamPackage.Name.to_string name in
              if name_str = "ocaml-base-compiler" || name_str = "ocaml-variants" then
                Some candidate
              else acc
          ) doc_layers_by_name None
        in

        match ocaml_version with
        | None ->
          Os.log "global_deferred_doc_link: Could not find OCaml version for %s, skipping" (OpamPackage.to_string pkg)
        | Some ocaml_version ->
          Os.log "global_deferred_doc_link: Running link-only for %s with %d dep hashes"
            (OpamPackage.to_string pkg) (List.length dep_doc_hashes);
          let _doc_result =
            Container.generate_docs ~t ~build_layer_dir ~doc_layer_dir ~dep_doc_hashes ~pkg ~installed_libs ~installed_docs ~phase:S.Doc_link_only ~ocaml_version
          in
          ()
      ) packages_to_relink
    end;
    (* Release the container handle acquired above (pairs with Container.init). *)
    Container.deinit ~t
  end
(** Collect all layer names that should be kept based on current solutions.
    A layer is referenced if its package is in any of the solutions. *)
let collect_referenced_layer_names ~config ~solutions =
  let os_key = Config.os_key ~config in
  let layer_dir = Path.(config.dir / os_key) in

  (* Union of all packages appearing in any solution. *)
  let solution_packages =
    List.fold_left
      (fun acc (_target, solution) ->
        OpamPackage.Map.fold (fun pkg _ set -> OpamPackage.Set.add pkg set) solution acc)
      OpamPackage.Set.empty solutions
  in

  (* A layer entry is kept when its layer.json names a package from the solutions;
     unreadable or malformed marker files are simply skipped. *)
  let keep entry =
    let layer_json = Path.(layer_dir / entry / "layer.json") in
    Sys.file_exists layer_json
    && (try
          let json = Yojson.Safe.from_file layer_json in
          let open Yojson.Safe.Util in
          let pkg = json |> member "package" |> to_string |> OpamPackage.of_string in
          OpamPackage.Set.mem pkg solution_packages
        with _ -> false)
  in
  match Sys.readdir layer_dir with
  | entries -> Array.fold_left (fun acc entry -> if keep entry then entry :: acc else acc) [] entries
  | exception _ -> []

(** Run garbage collection for layers and universes after batch processing. *)
let run_gc ~config ~solutions =
  let referenced = collect_referenced_layer_names ~config ~solutions in
  Printf.printf "Phase 4: Running garbage collection...\n%!";
  (* Layer GC: delete unreferenced layers under the per-OS layer directory. *)
  let lr =
    Day10_lib.Gc.gc_layers ~cache_dir:config.dir ~os_key:(Config.os_key ~config)
      ~referenced_hashes:referenced
  in
  Printf.printf " Layers: %d referenced, %d deleted, %d special kept\n%!"
    lr.referenced lr.deleted (List.length lr.kept);
  (* Universe GC only applies when HTML output is being produced. *)
  Option.iter
    (fun html_dir ->
      let ur = Day10_lib.Gc.gc_universes ~html_dir in
      Printf.printf " Universes: %d referenced, %d deleted\n%!" ur.referenced ur.deleted)
    config.html_output

(** Batch driver: solve, bless, build, and garbage-collect a set of packages.

    [package_arg] is either a single package name or ["@file"] naming a JSON
    package-list file.  Work proceeds in phases:
    1. solve every target (with an on-disk solution cache keyed by the
       opam-repository commit hashes),
    2. compute blessings (which package instance is canonical),
    3. build every target — serially or via fork, per [config.fork],
    4. run garbage collection, print the summary, and finish the run log. *)
let run_batch (config : Config.t) package_arg =
  (* "@file" means read the package list from a JSON file; otherwise the
     argument itself is the single package to process. *)
  let packages =
    if String.length package_arg > 0 && package_arg.[0] = '@' then
      let filename = String.sub package_arg 1 (String.length package_arg - 1) in
      Json_packages.read_packages filename
    else
      [ package_arg ]
  in
  if packages = [] then begin
    Printf.eprintf "No packages to process\n%!";
    exit 1
  end;

  (* Set up per-PID logging *)
  let log_dir = Path.(config.dir / "logs") in
  Os.set_log_dir log_dir;

  (* Start run logging *)
  Day10_lib.Run_log.set_log_base_dir log_dir;
  let run_info = Day10_lib.Run_log.start_run () in

  (* Clean up stale .new/.old directories from interrupted swaps *)
  (match config.html_output with
  | Some html_dir -> Os.Atomic_swap.cleanup_stale_dirs ~html_dir
  | None -> ());

  (* Clean up stale lock files from crashed/interrupted runs *)
  Day10_lib.Build_lock.cleanup_stale ~cache_dir:config.dir;

  (* Get opam-repository commit hash for solution caching.  The SHAs of all
     configured repositories are joined with "-" to form the cache key.
     NOTE(review): repo paths are interpolated unquoted into a shell command;
     paths containing spaces would break — confirm callers never pass such. *)
  let opam_repo_sha =
    List.map
      (fun opam_repository ->
        let cmd = Printf.sprintf "git -C %s rev-parse --short HEAD" opam_repository in
        Os.run cmd |> String.trim)
      config.opam_repositories
    |> String.concat "-"
  in
  let solutions_cache_dir = Path.(config.dir / "solutions" / opam_repo_sha) in
  Os.mkdir ~parents:true solutions_cache_dir;

  (* Phase 1: Solve all targets (with caching) *)
  let cached_count = try Array.length (Sys.readdir solutions_cache_dir) with _ -> 0 in
  Printf.printf "Phase 1: Solving %d targets (cache: %s, %d cached)...\n%!" (List.length packages) opam_repo_sha cached_count;
  let temp_dir = Os.temp_dir ~perms:0o755 ~parent_dir:config.dir "solve-" "" in
  (* Round-trip a (package, solution) pair through JSON — used both for the
     on-disk solution cache and for fork_map worker IPC. *)
  let serialize (pkg, solution) =
    Yojson.Safe.to_string (`Assoc [
      ("package", `String (OpamPackage.to_string pkg));
      ("solution", Util.solution_to_json solution)
    ])
  in
  let deserialize str =
    let open Yojson.Safe.Util in
    let json = Yojson.Safe.from_string str in
    let pkg = json |> member "package" |> to_string |> OpamPackage.of_string in
    let solution = json |> member "solution" |> Util.solution_of_json in
    (pkg, solution)
  in
  (* Solve one target, consulting the cache first.  Returns [None] on a
     (possibly cached) solver failure; failures are cached too, so they are
     not re-attempted on later runs against the same repo SHA. *)
  let solve_one pkg_name =
    let package = OpamPackage.of_string pkg_name in
    let cache_file = Path.(solutions_cache_dir / pkg_name ^ ".json") in
    (* Check cache first *)
    if Sys.file_exists cache_file then begin
      try
        let json = Yojson.Safe.from_string (Os.read_from_file cache_file) in
        let open Yojson.Safe.Util in
        (* Check if this is a cached failure *)
        if json |> member "failed" |> to_bool_option = Some true then
          None (* Cached failure, skip silently *)
        else
          Some (deserialize (Yojson.Safe.to_string json))
      with _ ->
        (* Cache file corrupted, re-solve *)
        let pkg_config = { config with package = pkg_name } in
        match solve pkg_config package with
        | Ok solution ->
          Os.write_to_file cache_file (serialize (package, solution));
          Some (package, solution)
        | Error msg ->
          Os.write_to_file cache_file (Yojson.Safe.to_string (`Assoc [("failed", `Bool true); ("error", `String msg)]));
          None
    end else begin
      let pkg_config = { config with package = pkg_name } in
      match solve pkg_config package with
      | Ok solution ->
        Printf.printf " Solved %s (%d packages)\n%!" pkg_name (OpamPackage.Map.cardinal solution);
        Os.write_to_file cache_file (serialize (package, solution));
        Some (package, solution)
      | Error msg ->
        Os.write_to_file cache_file (Yojson.Safe.to_string (`Assoc [("failed", `Bool true); ("error", `String msg)]));
        None
    end
  in
  (* Solve in parallel only when --fork N with N > 1 was requested. *)
  let results = match config.fork with
    | Some n when n > 1 ->
      Os.fork_map ~np:n ~temp_dir ~serialize ~deserialize solve_one packages
    | _ ->
      List.map (fun pkg_name -> (pkg_name, solve_one pkg_name)) packages
  in
  let () = Os.rm ~recursive:true temp_dir in
  let solutions = List.filter_map (fun (_, result) -> result) results in
  let total_failed = List.length packages - List.length solutions in
  (* Derive "newly solved" by diffing the cache-directory size before/after. *)
  let new_cached_count = try Array.length (Sys.readdir solutions_cache_dir) with _ -> 0 in
  let newly_cached = new_cached_count - cached_count in
  Printf.printf " %d solutions (%d newly solved), %d failed\n%!" (List.length solutions) newly_cached total_failed;

  (* Write initial progress after Phase 1 *)
  let progress = Day10_lib.Progress.create
    ~run_id:(Day10_lib.Run_log.get_id run_info)
    ~start_time:(Day10_lib.Run_log.format_time (Day10_lib.Run_log.get_start_time run_info))
    ~targets:(List.map OpamPackage.to_string (List.map fst solutions))
  in
  let progress = Day10_lib.Progress.set_solutions progress
    ~found:(List.length solutions)
    ~failed:total_failed
  in
  let progress = Day10_lib.Progress.set_phase progress Day10_lib.Progress.Blessings in
  Day10_lib.Progress.write ~run_dir:(Day10_lib.Run_log.get_run_dir run_info) progress;
  (* Mutable handle so later phases (and forked-build callbacks) can keep
     updating the progress record incrementally. *)
  let progress_ref = ref progress in

  if solutions = [] then begin
    Printf.eprintf "No solutions found, nothing to build\n%!";
    exit 1
  end;

  (* Phase 2: Compute blessings *)
  Printf.printf "Phase 2: Computing blessings for %d targets...\n%!" (List.length solutions);
  let trans_deps_per_target = List.map (fun (target, solution) ->
    let ordered = topological_sort solution in
    let trans = pkg_deps solution ordered in
    (target, trans)
  ) solutions in
  let blessing_maps = Blessing.compute_blessings trans_deps_per_target in

  (* Report blessing stats *)
  let total_blessed = List.fold_left (fun acc (_, map) ->
    acc + OpamPackage.Map.fold (fun _ b c -> if b then c + 1 else c) map 0
  ) 0 blessing_maps in
  let total_packages = List.fold_left (fun acc (_, map) ->
    acc + OpamPackage.Map.cardinal map
  ) 0 blessing_maps in
  Printf.printf " %d/%d package instances blessed across %d targets\n%!"
    total_blessed total_packages (List.length solutions);

  (* Update progress: entering build phase *)
  progress_ref := Day10_lib.Progress.set_phase !progress_ref Day10_lib.Progress.Building;
  progress_ref := Day10_lib.Progress.set_build_total !progress_ref (List.length solutions);
  Day10_lib.Progress.write ~run_dir:(Day10_lib.Run_log.get_run_dir run_info) !progress_ref;

  (* Phase 3: Build with blessings *)
  let total_targets = List.length solutions in
  Printf.printf "Phase 3: Building %d targets...\n%!" total_targets;
  (* Create output directories if they're treated as directories (batch mode) *)
  let () = Option.iter (fun path -> Os.mkdir ~parents:true path) config.json in
  let () = Option.iter (fun path -> Os.mkdir ~parents:true path) config.md in
  let () = Option.iter (fun path -> Os.mkdir ~parents:true path) config.dot in

  (* Build one target: derive per-package output file paths, then run the
     regular health-check with the precomputed blessing map. *)
  let run_with_target (pkg, blessed_map) =
    let pkg_name = OpamPackage.to_string pkg in
    let json = Option.map (fun path -> Path.(path / pkg_name ^ ".json")) config.json in
    let md = Option.map (fun path -> Path.(path / pkg_name ^ ".md")) config.md in
    let dot = Option.map (fun path -> Path.(path / pkg_name ^ ".dot")) config.dot in
    let config = { config with
      package = pkg_name;
      blessed_map = Some blessed_map;
      json; md; dot;
    } in
    run_health_check config
  in
  (* Pair each solved target with its blessing map, keeping solution order. *)
  let items = List.filter_map (fun (target, _solution) ->
    List.find_opt (fun (t, _) -> OpamPackage.equal t target) blessing_maps
  ) solutions in
  (* Summarise by scanning layer.json files on disk — robust to forked
     workers, since all state lives in the filesystem. *)
  let print_batch_summary () =
    (* Count actual results by scanning the filesystem *)
    let os_key = Config.os_key ~config in
    let layer_dir = Path.(config.dir / os_key) in
    let build_success = ref 0 in
    let build_fail = ref 0 in
    let doc_success = ref 0 in
    let doc_fail = ref 0 in
    let failures = ref [] in
    let () =
      try
        Sys.readdir layer_dir |> Array.iter (fun name ->
          let layer_json = Path.(layer_dir / name / "layer.json") in
          if Sys.file_exists layer_json then
            try
              let json = Yojson.Safe.from_file layer_json in
              let open Yojson.Safe.Util in
              if String.length name > 6 && String.sub name 0 6 = "build-" then begin
                (* Build layer *)
                let pkg_name = json |> member "package" |> to_string in
                let exit_status = json |> member "exit_status" |> to_int_option |> Option.value ~default:(-1) in
                if exit_status = 0 then begin
                  incr build_success;
                  (* Add build log to run *)
                  let build_log = Path.(layer_dir / name / "build.log") in
                  Day10_lib.Run_log.add_build_log run_info ~package:pkg_name ~source_log:build_log
                end else begin
                  incr build_fail;
                  failures := (pkg_name, Printf.sprintf "build exit code %d" exit_status) :: !failures;
                  let build_log = Path.(layer_dir / name / "build.log") in
                  Day10_lib.Run_log.add_build_log run_info ~package:pkg_name ~source_log:build_log
                end
              end else if String.length name > 4 && String.sub name 0 4 = "doc-" then begin
                (* Doc layer - count blessed ones, but log all *)
                let pkg_name = json |> member "package" |> to_string in
                let doc = json |> member "doc" in
                let blessed = doc |> member "blessed" |> to_bool_option |> Option.value ~default:false in
                let status = doc |> member "status" |> to_string_option |> Option.value ~default:"" in
                (* Extract hash from doc layer name (doc-{hash}) for unique log filenames *)
                let layer_hash = String.sub name 4 (String.length name - 4) in
                (* Add doc log for all doc layers (use hash suffix for uniqueness) *)
                let doc_log = Path.(layer_dir / name / "odoc-voodoo-all.log") in
                Day10_lib.Run_log.add_doc_log run_info ~package:pkg_name ~source_log:doc_log ~layer_hash ();
                (* Only count blessed docs in summary stats *)
                if blessed then begin
                  if status = "success" then
                    incr doc_success
                  else begin
                    incr doc_fail;
                    let error_msg = doc |> member "error" |> to_string_option |> Option.value ~default:"unknown error" in
                    failures := (pkg_name, Printf.sprintf "doc: %s" error_msg) :: !failures
                  end
                end
              end
            with _ -> ()
        )
      with _ -> ()
    in
    (* Count generated HTML versions under <html_dir>/p/<pkg>/<version>. *)
    let html_versions = match config.html_output with
      | None -> 0
      | Some html_dir ->
        let p_dir = Path.(html_dir / "p") in
        if Sys.file_exists p_dir then
          try
            Sys.readdir p_dir |> Array.fold_left (fun acc pkg_name ->
              let pkg_dir = Path.(p_dir / pkg_name) in
              if Sys.is_directory pkg_dir then
                acc + (try Array.length (Sys.readdir pkg_dir) with _ -> 0)
              else acc
            ) 0
          with _ -> 0
        else 0
    in
    (* Write run summary *)
    let _summary = Day10_lib.Run_log.finish_run run_info
      ~targets_requested:(List.length packages)
      ~solutions_found:(List.length solutions)
      ~build_success:!build_success
      ~build_failed:!build_fail
      ~doc_success:!doc_success
      ~doc_failed:!doc_fail
      ~doc_skipped:0 (* TODO: track skipped docs *)
      ~failures:!failures
    in
    Printf.printf "\nBatch summary:\n%!";
    Printf.printf " Targets requested: %d\n%!" (List.length packages);
    Printf.printf " Solutions found: %d (failed to solve: %d)\n%!" (List.length solutions) total_failed;
    Printf.printf " Build layers: %d success, %d failed\n%!" !build_success !build_fail;
    Printf.printf " Doc layers: %d success, %d failed (blessed only)\n%!" !doc_success !doc_fail;
    Printf.printf " HTML versions: %d\n%!" html_versions
  in
  (* NOTE(review): the post-build tail (deferred doc link, JTW assembly, GC,
     summary, progress delete) is duplicated in both branches below — keep
     the two copies in sync when editing either. *)
  match config.fork with
  | Some 1 | None ->
    (* Serial build: one target at a time, updating progress.json as we go. *)
    let build_count = ref 0 in
    List.iter (fun item ->
      run_with_target item;
      incr build_count;
      progress_ref := Day10_lib.Progress.set_completed !progress_ref
        ~build:!build_count ~doc:!build_count;
      Day10_lib.Progress.write ~run_dir:(Day10_lib.Run_log.get_run_dir run_info) !progress_ref
    ) items;
    (* Run global deferred doc link pass for x-extra-doc-deps *)
    run_global_deferred_doc_link config;
    (* Assemble JTW output if enabled *)
    (match config.with_jtw, config.jtw_output with
    | true, Some jtw_output ->
      Printf.printf "Phase 4: Assembling JTW output...\n%!";
      (* Find OCaml version from any solution *)
      let ocaml_version = List.find_map (fun (_target, solution) -> extract_ocaml_version solution) solutions in
      (match ocaml_version with
      | Some ocaml_version ->
        Jtw_gen.assemble_jtw_output ~config ~jtw_output ~ocaml_version ~solutions ~blessed_maps:blessing_maps
      | None -> Printf.printf " Warning: no OCaml version found in solutions, skipping JTW assembly\n%!")
    | _ -> ());
    (* Update progress: entering GC phase *)
    progress_ref := Day10_lib.Progress.set_phase !progress_ref Day10_lib.Progress.Gc;
    Day10_lib.Progress.write ~run_dir:(Day10_lib.Run_log.get_run_dir run_info) !progress_ref;
    (* Run garbage collection *)
    run_gc ~config ~solutions;
    print_batch_summary ();
    (* Delete progress.json - summary.json takes over *)
    Day10_lib.Progress.delete ~run_dir:(Day10_lib.Run_log.get_run_dir run_info)
  | Some n ->
    (* Parallel build via fork; progress printed every 25 completions. *)
    let completed = ref 0 in
    let failed = ref 0 in
    let last_reported = ref 0 in
    let on_complete exit_code =
      incr completed;
      if exit_code <> 0 then incr failed;
      (* Update progress.json after each target completion *)
      progress_ref := Day10_lib.Progress.set_completed !progress_ref
        ~build:!completed ~doc:!completed;
      Day10_lib.Progress.write ~run_dir:(Day10_lib.Run_log.get_run_dir run_info) !progress_ref;
      (* Report every 25 completions or at the end *)
      if !completed - !last_reported >= 25 || !completed = total_targets then begin
        (* Use fixed-width format with padding to overwrite previous content *)
        Printf.printf "\r%-60s\r" ""; (* Clear line *)
        if !failed > 0 then
          Printf.printf "[Phase 3] %d/%d targets completed (%d failed)%!" !completed total_targets !failed
        else
          Printf.printf "[Phase 3] %d/%d targets completed%!" !completed total_targets;
        last_reported := !completed
      end
    in
    Os.fork_with_progress ~np:n ~on_complete run_with_target items;
    Printf.printf "\n%!";
    (* Run global deferred doc link pass for x-extra-doc-deps *)
    run_global_deferred_doc_link config;
    (* Assemble JTW output if enabled *)
    (match config.with_jtw, config.jtw_output with
    | true, Some jtw_output ->
      Printf.printf "Phase 4: Assembling JTW output...\n%!";
      let ocaml_version = List.find_map (fun (_target, solution) -> extract_ocaml_version solution) solutions in
      (match ocaml_version with
      | Some ocaml_version ->
        Jtw_gen.assemble_jtw_output ~config ~jtw_output ~ocaml_version ~solutions ~blessed_maps:blessing_maps
      | None -> Printf.printf " Warning: no OCaml version found in solutions, skipping JTW assembly\n%!")
    | _ -> ());
    (* Update progress: entering GC phase *)
    progress_ref := Day10_lib.Progress.set_phase !progress_ref Day10_lib.Progress.Gc;
    Day10_lib.Progress.write ~run_dir:(Day10_lib.Run_log.get_run_dir run_info) !progress_ref;
    (* Run garbage collection *)
    run_gc ~config ~solutions;
    print_batch_summary ();
    (* Delete progress.json - summary.json takes over *)
    Day10_lib.Progress.delete ~run_dir:(Day10_lib.Run_log.get_run_dir run_info)

(* Command-line argument specifications shared by the subcommands below.
   Each one inlines its help text directly into [Arg.info]. *)
let cache_dir_term =
  Arg.(required & opt (some string) None
       & info [ "cache-dir" ] ~docv:"DIR" ~doc:"Directory to use for caching (required)")

let ocaml_version_term =
  Arg.(value & opt (some string) None
       & info [ "ocaml-version" ] ~docv:"VERSION"
           ~doc:"OCaml version to use (if not specified, solver picks compatible version)")

let opam_repository_term =
  Arg.(non_empty & opt_all string []
       & info [ "opam-repository" ] ~docv:"OPAM-REPO"
           ~doc:"Directory containing opam repositories (required, can be specified multiple times)")

let md_term =
  Arg.(value & opt (some string) None
       & info [ "md" ] ~docv:"FILE" ~doc:"Output results in markdown format")

let json_term =
  Arg.(value & opt (some string) None
       & info [ "json" ] ~docv:"FILE" ~doc:"Output results in json format")

let dot_term =
  Arg.(value & opt (some string) None
       & info [ "dot" ] ~docv:"FILE" ~doc:"Save solution in Graphviz DOT format")

let with_test_term =
  Arg.(value & flag & info [ "with-test" ] ~doc:"Enable test dependencies (default false)")

let with_doc_term =
  Arg.(value & flag & info [ "with-doc" ] ~doc:"Generate documentation with odoc (default false)")

let doc_tools_repo_term =
  Arg.(value & opt string "https://github.com/ocaml/odoc.git"
       & info [ "doc-tools-repo" ] ~docv:"URL"
           ~doc:"Git repository for odoc tools (default: https://github.com/ocaml/odoc.git)")

let doc_tools_branch_term =
  Arg.(value & opt string "master"
       & info [ "doc-tools-branch" ] ~docv:"BRANCH"
           ~doc:"Git branch for odoc tools (default: master)")

let jtw_tools_repo_term =
  Arg.(value & opt string "https://github.com/jonnyfiveisonline/js_top_worker.git"
       & info [ "jtw-tools-repo" ] ~docv:"URL"
           ~doc:"Git repository for js_top_worker tools (default: https://github.com/jonnyfiveisonline/js_top_worker.git)")

let jtw_tools_branch_term =
  Arg.(value & opt string "enhancements"
       & info [ "jtw-tools-branch" ] ~docv:"BRANCH"
           ~doc:"Git branch for js_top_worker tools (default: enhancements)")

let html_output_term =
  Arg.(value & opt (some string) None
       & info [ "html-output" ] ~docv:"DIR"
           ~doc:"Shared HTML output directory for all documentation (enables doc generation for all packages)")

let with_jtw_term =
  Arg.(value & flag
       & info [ "with-jtw" ]
           ~doc:"Generate JTW (js_top_worker) artifacts for browser REPL (default false)")

let jtw_output_term =
  Arg.(value & opt (some string) None
       & info [ "jtw-output" ] ~docv:"DIR"
           ~doc:"Output directory for JTW artifacts (browser REPL support files)")

let log_term =
  Arg.(value & flag & info [ "log" ] ~doc:"Print build logs (default false)")

let dry_run_term =
  Arg.(value & flag
       & info [ "dry-run" ]
           ~doc:"Calculate solution and check if layers exist without building (default false)")

let all_versions_term =
  Arg.(value & flag
       & info [ "all-versions" ] ~doc:"List all versions instead of just the latest")

let tag_term =
  Arg.(value & opt (some string) None
       & info [ "tag" ] ~docv:"TAG" ~doc:"Import layers into Docker with specified tag")

(* The platform terms below default to values probed from the running system,
   so they can normally be omitted on the command line. *)
let arch_term =
  let detected = (OpamStd.Sys.uname ()).machine in
  Arg.(value & opt string detected
       & info [ "arch" ] ~docv:"ARCH" ~doc:"Architecture (default: detected from system)")

let os_term =
  let detected = OpamSysPoll.os OpamVariable.Map.empty |> Option.value ~default:"linux" in
  Arg.(value & opt string detected
       & info [ "os" ] ~docv:"OS" ~doc:"Operating system (default: detected from system)")

let os_distribution_term =
  let detected = OpamSysPoll.os_distribution OpamVariable.Map.empty |> Option.value ~default:"debian" in
  Arg.(value & opt string detected
       & info [ "os-distribution" ] ~docv:"OS_DISTRIBUTION"
           ~doc:"OS distribution (default: detected from system)")

let os_family_term =
  let detected = OpamSysPoll.os_family OpamVariable.Map.empty |> Option.value ~default:"debian" in
  Arg.(value & opt string detected
       & info [ "os-family" ] ~docv:"OS_FAMILY" ~doc:"OS family (default: detected from system)")

let os_version_term =
  let detected = OpamSysPoll.os_version OpamVariable.Map.empty |> Option.value ~default:"13" in
  Arg.(value & opt string detected
       & info [ "os-version" ] ~docv:"OS_VERSION" ~doc:"OS version (default: detected from system)")

let fork_term =
  Arg.(value & opt (some int) None
       & info [ "fork" ] ~docv:"N" ~doc:"Process packages in parallel using fork with N parallel jobs")

let prune_layers_term =
  Arg.(value & flag
       & info [ "prune-layers" ]
           ~doc:"Delete package layers after docs are extracted to html-output (saves disk space)")

let blessed_map_term =
  Arg.(value & opt (some string) None
       & info [ "blessed-map" ] ~docv:"FILE"
           ~doc:"Path to a pre-computed blessing map JSON file (from batch mode)")

(* List the basenames (without extension) of all [*.opam] files directly
   inside [dir], in directory-read order; returns [] if the directory
   cannot be read. *)
let find_opam_files dir =
  match Sys.readdir dir with
  | entries ->
      Array.to_list entries
      |> List.filter (fun name -> Filename.check_suffix name ".opam")
      |> List.map Filename.remove_extension
  | exception Sys_error _ -> []

(* Cmdliner command: run CI against a local project directory.  The package
   under test is taken from the first [*.opam] file found in DIRECTORY. *)
let ci_cmd =
  let directory_arg =
    let doc = "Directory to test" in
    Arg.(required & pos 0 (some string) None & info [] ~docv:"DIRECTORY" ~doc)
  in
  let ci_term =
    Term.(
      (* Parameter order must match the [$]-application order below. *)
      const (fun dir ocaml_version opam_repositories directory md json dot with_test with_doc doc_tools_repo doc_tools_branch html_output log dry_run arch os os_distribution os_family os_version fork prune_layers ->
        let ocaml_version = Option.map OpamPackage.of_string ocaml_version in
        (* Fail with a clear message instead of the bare [Failure "hd"] that
           [List.hd] would raise when DIRECTORY holds no .opam file. *)
        let package =
          match find_opam_files directory with
          | pkg :: _ -> pkg
          | [] ->
            Printf.eprintf "Error: no .opam files found in %s\n%!" directory;
            Stdlib.exit 1
        in
        run_ci
          {
            dir;
            ocaml_version;
            opam_repositories;
            package;
            arch;
            os;
            os_distribution;
            os_family;
            os_version;
            directory = Some directory;
            md;
            json;
            dot;
            with_test;
            with_doc;
            with_jtw = false;
            doc_tools_repo;
            doc_tools_branch;
            jtw_tools_repo = "";
            jtw_tools_branch = "";
            html_output;
            jtw_output = None;
            tag = None;
            log;
            dry_run;
            fork;
            prune_layers;
            blessed_map = None;
          })
      $ cache_dir_term $ ocaml_version_term $ opam_repository_term $ directory_arg $ md_term $ json_term $ dot_term $ with_test_term $ with_doc_term $ doc_tools_repo_term $ doc_tools_branch_term $ html_output_term $ log_term $ dry_run_term $ arch_term $ os_term $ os_distribution_term $ os_family_term $ os_version_term $ fork_term $ prune_layers_term)
  in
  let ci_info = Cmd.info "ci" ~doc:"Run CI tests on a directory" in
  Cmd.v ci_info ci_term

(* Cmdliner command: run a health check on one package, or on a list of
   packages read from "@file". *)
let health_check_cmd =
  let package_arg =
    let doc = "Package name to test (or @filename to read package list from file)" in
    Arg.(required & pos 0 (some string) None & info [] ~docv:"PACKAGE" ~doc)
  in
  let health_check_term =
    Term.(
      (* NOTE: the 27 parameters below are applied positionally by the
         [$]-chain underneath — keep both lists in exactly the same order. *)
      const (fun dir ocaml_version opam_repositories package_arg md json dot with_test with_doc with_jtw doc_tools_repo doc_tools_branch jtw_tools_repo jtw_tools_branch html_output jtw_output log dry_run tag arch os os_distribution os_family os_version fork prune_layers blessed_map_file ->
        let ocaml_version = Option.map OpamPackage.of_string ocaml_version in
        (* A pre-computed blessing map (from batch mode) overrides the default. *)
        let blessed_map = Option.map Blessing.load_blessed_map blessed_map_file in
        run_health_check_multi { dir; ocaml_version; opam_repositories; package = ""; arch; os; os_distribution; os_family; os_version; directory = None; md; json; dot; with_test; with_doc; with_jtw; doc_tools_repo; doc_tools_branch; jtw_tools_repo; jtw_tools_branch; html_output; jtw_output; tag; log; dry_run; fork; prune_layers; blessed_map } package_arg)
      $ cache_dir_term $ ocaml_version_term $ opam_repository_term $ package_arg $ md_term $ json_term $ dot_term $ with_test_term $ with_doc_term $ with_jtw_term $ doc_tools_repo_term $ doc_tools_branch_term $ jtw_tools_repo_term $ jtw_tools_branch_term $ html_output_term $ jtw_output_term $ log_term $ dry_run_term $ tag_term $ arch_term $ os_term $ os_distribution_term $ os_family_term $ os_version_term $ fork_term $ prune_layers_term $ blessed_map_term)
  in
  let health_check_info = Cmd.info "health-check" ~doc:"Run health check on a package or list of packages" in
  Cmd.v health_check_info health_check_term

(* Cmdliner command: list every package (optionally all versions) found in
   the given opam repositories.  Most config fields are irrelevant for
   listing and are filled with inert defaults. *)
let list_cmd =
  let action ocaml_version opam_repositories all_versions json arch os os_distribution os_family os_version =
    let ocaml_version = Option.map OpamPackage.of_string ocaml_version in
    run_list
      { dir = "";
        ocaml_version;
        opam_repositories;
        package = "";
        arch;
        os;
        os_distribution;
        os_family;
        os_version;
        directory = None;
        md = None;
        json;
        dot = None;
        with_test = false;
        with_doc = false;
        with_jtw = false;
        doc_tools_repo = "";
        doc_tools_branch = "";
        jtw_tools_repo = "";
        jtw_tools_branch = "";
        html_output = None;
        jtw_output = None;
        tag = None;
        log = false;
        dry_run = false;
        fork = None;
        prune_layers = false;
        blessed_map = None;
      }
      all_versions
  in
  let term =
    Term.(
      const action $ ocaml_version_term $ opam_repository_term $ all_versions_term
      $ json_term $ arch_term $ os_term $ os_distribution_term $ os_family_term
      $ os_version_term)
  in
  Cmd.v (Cmd.info "list" ~doc:"List packages in opam repositories") term

(* Cmdliner command: sync generated documentation to DESTINATION, optionally
   regenerating the top-level index afterwards. *)
let sync_docs_cmd =
  let destination_arg =
    Arg.(required & pos 0 (some string) None
         & info [] ~docv:"DESTINATION"
             ~doc:"Destination for documentation (local path, user@host:/path, or rsync://host/path)")
  in
  let blessed_only_term =
    Arg.(value & flag & info [ "blessed-only" ] ~doc:"Only sync blessed (canonical) packages")
  in
  let package_term =
    Arg.(value & opt (some string) None
         & info [ "package" ] ~docv:"PKG" ~doc:"Only sync specific package")
  in
  let index_term =
    Arg.(value & flag & info [ "index" ] ~doc:"Generate index.html listing all packages")
  in
  (* Sync first; generate the index only when requested and the sync
     succeeded.  Exit non-zero if either step failed. *)
  let action cache_dir destination dry_run blessed_only package_filter generate_index arch _os os_distribution os_version =
    let os_key = Printf.sprintf "%s-%s-%s" os_distribution os_version arch in
    let success = Sync_docs.sync ~cache_dir ~os_key ~destination ~dry_run ~blessed_only ~package_filter in
    let index_ok =
      if generate_index && success then
        Sync_docs.generate_index ~cache_dir ~os_key ~destination ~dry_run
      else true
    in
    if not (success && index_ok) then Stdlib.exit 1
  in
  let term =
    Term.(
      const action $ cache_dir_term $ destination_arg $ dry_run_term $ blessed_only_term
      $ package_term $ index_term $ arch_term $ os_term $ os_distribution_term
      $ os_version_term)
  in
  Cmd.v (Cmd.info "sync-docs" ~doc:"Sync generated documentation to a destination") term

(* Cmdliner command: mount (or, with --unmount, tear down) a combined
   documentation tree assembled from the per-package doc layers via
   overlayfs. *)
let combine_docs_cmd =
  let mount_point_arg =
    let doc = "Mount point for the combined documentation overlay" in
    Arg.(required & pos 0 (some string) None & info [] ~docv:"MOUNT_POINT" ~doc)
  in
  let work_dir_term =
    let doc = "Work directory for overlay (must be on same filesystem as cache)" in
    Arg.(value & opt (some string) None & info [ "work-dir" ] ~docv:"DIR" ~doc)
  in
  let index_term =
    let doc = "Generate index.html listing all packages" in
    Arg.(value & flag & info [ "index" ] ~doc)
  in
  let unmount_term =
    let doc = "Unmount instead of mount" in
    Arg.(value & flag & info [ "unmount"; "u" ] ~doc)
  in
  let support_files_term =
    let doc = "Directory containing odoc support files (CSS, JS, fonts)" in
    Arg.(value & opt (some string) None & info [ "support-files" ] ~docv:"DIR" ~doc)
  in
  let combine_docs_term =
    Term.(
      (* Parameter order must match the [$]-application order below. *)
      const (fun cache_dir mount_point work_dir generate_index unmount support_files_dir arch _os os_distribution os_version ->
        let os_key = Printf.sprintf "%s-%s-%s" os_distribution os_version arch in
        if unmount then begin
          if not (Combine_docs.unmount ~mount_point) then Stdlib.exit 1
        end
        else begin
          (* --work-dir is mandatory when mounting (overlayfs needs it). *)
          let work_dir = match work_dir with
            | Some d -> d
            | None ->
              Printf.eprintf "Error: --work-dir is required for mounting\n%!";
              Stdlib.exit 1
          in
          if not (Combine_docs.combine ~cache_dir ~os_key ~mount_point ~work_dir
                    ~generate_idx:generate_index ~support_files_dir) then
            Stdlib.exit 1
        end)
      $ cache_dir_term $ mount_point_arg $ work_dir_term $ index_term $ unmount_term
      $ support_files_term
      $ arch_term $ os_term $ os_distribution_term $ os_version_term)
  in
  let combine_docs_info = Cmd.info "combine-docs" ~doc:"Combine documentation layers using overlayfs" in
  Cmd.v combine_docs_info combine_docs_term
(* Cmdliner command [batch]: solve all targets up front, compute doc
   "blessings", then build with the pre-computed blessing map. *)
let batch_cmd =
  let package_arg =
    let doc = "Package name or @filename to read package list from file (JSON format: {\"packages\":[...]})" in
    Arg.(required & pos 0 (some string) None & info [] ~docv:"PACKAGE" ~doc)
  in
  let batch_term =
    Term.(
      (* NOTE: the 26 lambda parameters must mirror the [$] chain below in the
         exact same order — Cmdliner applies terms positionally. *)
      const (fun dir ocaml_version opam_repositories package_arg md json dot with_test with_doc with_jtw doc_tools_repo doc_tools_branch jtw_tools_repo jtw_tools_branch html_output jtw_output log dry_run tag arch os os_distribution os_family os_version fork prune_layers ->
          let ocaml_version = Option.map OpamPackage.of_string ocaml_version in
          (* [package] is left empty because [run_batch] derives the package
             set from [package_arg] (a name or an @file); [blessed_map = None]
             means blessings are computed inside the batch run itself. *)
          run_batch { dir; ocaml_version; opam_repositories; package = ""; arch; os; os_distribution; os_family; os_version; directory = None; md; json; dot; with_test; with_doc; with_jtw; doc_tools_repo; doc_tools_branch; jtw_tools_repo; jtw_tools_branch; html_output; jtw_output; tag; log; dry_run; fork; prune_layers; blessed_map = None } package_arg)
      $ cache_dir_term $ ocaml_version_term $ opam_repository_term $ package_arg $ md_term $ json_term $ dot_term $ with_test_term $ with_doc_term $ with_jtw_term $ doc_tools_repo_term $ doc_tools_branch_term $ jtw_tools_repo_term $ jtw_tools_branch_term $ html_output_term $ jtw_output_term $ log_term $ dry_run_term $ tag_term $ arch_term $ os_term $ os_distribution_term $ os_family_term $ os_version_term $ fork_term $ prune_layers_term)
  in
  let batch_info = Cmd.info "batch" ~doc:"Solve all targets, compute blessings, then build with pre-computed blessing maps" in
  Cmd.v batch_info batch_term
(* Top-level man page and metadata for the [day10] command group. *)
let main_info =
  let doc = "A tool for running CI and health checks" in
  let man =
    [
      `S Manpage.s_description;
      `P "This tool provides CI testing and health checking capabilities.";
      `P "Use '$(mname) ci DIRECTORY' to run CI tests on a directory.";
      `P "Use '$(mname) health-check PACKAGE' to run health checks on a package.";
      `P "Use '$(mname) health-check @FILENAME' to run health checks on multiple packages listed in FILENAME (JSON format: {\"packages\":[...]})";
      `P "Use '$(mname) batch PACKAGE' to solve, compute blessings, and build in batch mode.";
      (* Grammar fix: previously read "Use '$(mname) list' list packages in
         opam repository." *)
      `P "Use '$(mname) list' to list packages in the opam repository.";
      `P "Use '$(mname) sync-docs DESTINATION' to sync documentation to a destination.";
      `P "Use '$(mname) combine-docs MOUNT_POINT' to combine all doc layers into an overlay mount.";
      `P "Add --md flag to output results in markdown format.";
      `S Manpage.s_examples;
      `P "$(mname) ci --cache-dir /tmp/cache --opam-repository /tmp/opam-repository /path/to/project";
      `P "$(mname) health-check --cache-dir /tmp/cache --opam-repositories /tmp/opam-repository package --md";
      `P "$(mname) health-check --cache-dir /tmp/cache --opam-repositories /tmp/opam-repository @packages.json";
      `P "$(mname) batch --cache-dir /tmp/cache --opam-repository /tmp/opam-repository --with-doc --html-output /tmp/docs @packages.json";
      (* Fixed example: the list subcommand is invoked elsewhere as
         'day10 list --opam-repository <dir>' (singular); the example
         previously showed --opam-repositories. *)
      `P "$(mname) list --opam-repository /tmp/opam-repository";
      `P "$(mname) sync-docs --cache-dir /tmp/cache /var/www/docs --index";
      `P "$(mname) sync-docs --cache-dir /tmp/cache user@host:/var/www/docs";
    ]
  in
  Cmd.info "day10" ~version:"0.0.1" ~doc ~man
(* Program entry point: evaluate the command group; a bare invocation with no
   subcommand shows paged help. *)
let () =
  let show_help = Term.(ret (const (`Help (`Pager, None)))) in
  let subcommands = [ ci_cmd; health_check_cmd; batch_cmd; list_cmd; sync_docs_cmd; combine_docs_cmd ] in
  Cmd.group ~default:show_help main_info subcommands |> Cmd.eval |> exit
···11+(** Documentation generation orchestration using odoc_driver_voodoo.
22+33+ This module prepares the directory structure expected by odoc_driver_voodoo
44+ and provides the command to run it. odoc_driver handles all the odoc
55+ compile/link/html steps internally.
66+77+ Per-package prep structure (deleted after odoc compile):
88+ layer/prep/universes/{u}/{p}/{v}/
99+ lib/ # .cmti/.cmt/.ml/.mli files (copied from opam lib)
1010+ doc/ # .mld files (copied from opam doc)
1111+1212+ Accumulated odoc output (persists in layer overlay):
1313+ /home/opam/compile/ # .odoc files, visible to downstream packages
1414+1515+ HTML output (bind-mounted shared directory):
1616+ /html/p/{pkg}/{version}/ # blessed packages
1717+ /html/u/{universe}/{pkg}/{version}/ # non-blessed packages *)
(* Outcome of the documentation-generation stage for a single package build. *)
type doc_result =
  | Doc_success of { html_path : string; blessed : bool }
      (** Docs generated under [html_path]; [blessed] corresponds to the
          /html/p layout described in the module header (vs /html/u) *)
  | Doc_skipped (** Build failed, no docs generated *)
  | Doc_failure of string (** Doc generation failed with error message *)
(* Serialise a [doc_result] into its JSON object representation.
   Every result carries a "status" field; success adds "html_path"/"blessed",
   failure adds "error". *)
let doc_result_to_yojson result =
  let fields =
    match result with
    | Doc_success { html_path; blessed } ->
        [ ("status", `String "success"); ("html_path", `String html_path); ("blessed", `Bool blessed) ]
    | Doc_skipped -> [ ("status", `String "skipped") ]
    | Doc_failure msg -> [ ("status", `String "failure"); ("error", `String msg) ]
  in
  `Assoc fields
(** Compute universe hash from ordered dependency hashes.
    The hashes are joined with single spaces and digested (MD5, hex-encoded),
    reusing the existing hash computation from the build. *)
let compute_universe_hash ordered_hashes =
  Digest.to_hex (Digest.string (String.concat " " ordered_hashes))
(** Collect every package name mentioned in a filtered dependency formula,
    ignoring version constraints and the formula's boolean structure. *)
let get_dep_names formula =
  let rec collect names f =
    match f with
    | OpamFormula.Empty -> names
    | OpamFormula.Atom (name, _) -> OpamPackage.Name.Set.add name names
    | OpamFormula.Block inner -> collect names inner
    | OpamFormula.And (l, r) | OpamFormula.Or (l, r) -> collect (collect names l) r
  in
  collect OpamPackage.Name.Set.empty formula
(** Get filtered dependencies from an opam file.
    ~post:false gives compile deps, ~post:true gives link deps.
    The filter environment keeps build and doc dependencies while dropping
    test/dev/dev-setup ones; the surviving formula is then reduced to a set of
    package names. *)
let get_filtered_deps ~post opamfile =
  opamfile
  |> OpamFile.OPAM.depends
  |> OpamFilter.partial_filter_formula
       (OpamFilter.deps_var_env ~build:true ~post ~test:false
          ~doc:true ~dev_setup:false ~dev:false)
  |> get_dep_names
(** Determine if compile and link deps differ (i.e., there are post deps).
    Returns (compile_deps, link_deps, needs_separate_phases) where the flag is
    true iff the two dependency sets are not equal. *)
let analyze_doc_deps opamfile =
  let compile_deps = get_filtered_deps ~post:false opamfile in
  let link_deps = get_filtered_deps ~post:true opamfile in
  let identical = OpamPackage.Name.Set.equal compile_deps link_deps in
  (compile_deps, link_deps, not identical)
(** Get the post-only dependencies (link deps that aren't compile deps).
    These are packages that need to be built after this package but before
    its documentation can be linked. *)
let get_post_deps opamfile =
  OpamPackage.Name.Set.diff
    (get_filtered_deps ~post:true opamfile)
    (get_filtered_deps ~post:false opamfile)
(** Extract x-extra-doc-deps from an opam file.
    These are documentation dependencies that create cycles (e.g., odoc depends on
    sherlodoc for docs, but sherlodoc depends on odoc to build).
    The x-extra-doc-deps field contains package names that should only be used
    during the doc link phase, not during compile.

    The field can contain:
    - Simple package names: "odig"
    - Package names with constraints: "odoc-driver" {= version}

    We only extract the package names, ignoring constraints. *)
let get_extra_doc_deps opamfile =
  let open OpamParserTypes.FullPos in
  let extensions = OpamFile.OPAM.extensions opamfile in
  match OpamStd.String.Map.find_opt "x-extra-doc-deps" extensions with
  | None -> OpamPackage.Name.Set.empty
  | Some value ->
      (* Extract package name from a value, handling both:
         - String "pkg-name"
         - Option ({ pelem = String "pkg-name"; ... }, _) for "pkg-name" {constraint} *)
      let extract_name item =
        match item.pelem with
        | String name -> Some name
        | Option (inner, _) ->
            (match inner.pelem with
             | String name -> Some name
             | _ -> None)
        | _ -> None
      in
      (* The field value must be a list; any other shape (and any list item
         that is not a string/option-of-string) is silently ignored. *)
      let extract_names acc v =
        match v.pelem with
        | List { pelem = items; _ } ->
            List.fold_left (fun acc item ->
                match extract_name item with
                | Some name ->
                    OpamPackage.Name.Set.add (OpamPackage.Name.of_string name) acc
                | None -> acc
              ) acc items
        | _ -> acc
      in
      extract_names OpamPackage.Name.Set.empty value
(** Copy compilation artifacts from opam lib to prep structure.

    Uses the installed_libs and installed_docs lists from layer.json to know
    exactly which files were installed by this package. This avoids the problem
    of guessing package ownership based on META/dune-package presence.

    Maps:
      ~/.opam/default/lib/{rel_path} -> prep/.../lib/{rel_path}
      ~/.opam/default/doc/{rel_path} -> prep/.../doc/{rel_path}

    Where rel_path comes from the installed_libs/installed_docs lists.

    Returns the prep base directory
    [dest_layer_dir]/prep/universes/{universe}/{name}/{version}. Files are
    hardlinked when possible and byte-copied as a fallback; several workers may
    race on the same destination, so "already exists" is treated as success
    throughout. *)
let create_prep_structure ~source_layer_dir ~dest_layer_dir ~universe ~pkg ~installed_libs ~installed_docs =
  let pkg_name = OpamPackage.name_to_string pkg in
  let pkg_version = OpamPackage.version_to_string pkg in
  let opam_lib = Path.(source_layer_dir / "fs" / "home" / "opam" / ".opam" / "default" / "lib") in
  let opam_doc = Path.(source_layer_dir / "fs" / "home" / "opam" / ".opam" / "default" / "doc") in
  let prep_base = Path.(dest_layer_dir / "prep" / "universes" / universe / pkg_name / pkg_version) in
  let prep_lib = Path.(prep_base / "lib") in
  let prep_doc = Path.(prep_base / "doc") in
  (* Helper to link or copy a file, creating parent directories as needed.
     Missing sources and directories are silently skipped. *)
  let link_file src_file dst_file =
    if Sys.file_exists src_file && not (Sys.is_directory src_file) then begin
      (* Skip if destination already exists (another worker may have created it) *)
      if Sys.file_exists dst_file then ()
      else begin
        let dst_dir = Filename.dirname dst_file in
        Os.mkdir ~parents:true dst_dir;
        (* Use sudo for hardlinks due to fs.protected_hardlinks - can't link to files we don't own *)
        let rc = Os.sudo ~stderr:"/dev/null" [ "ln"; src_file; dst_file ] in
        if rc <> 0 && not (Sys.file_exists dst_file) then
          (* Only copy if ln failed AND file still doesn't exist (race condition protection) *)
          try Os.cp src_file dst_file with
          | Os.Copy_error _ when Sys.file_exists dst_file -> () (* Another worker created it *)
          | Os.Copy_error _ when not (Sys.file_exists src_file) -> () (* Source was removed *)
      end
    end
  in
  (* Copy each file from installed_libs *)
  List.iter (fun rel_path ->
      let src_file = Path.(opam_lib / rel_path) in
      let dst_file = Path.(prep_lib / rel_path) in
      link_file src_file dst_file
    ) installed_libs;
  (* Copy each file from installed_docs *)
  List.iter (fun rel_path ->
      let src_file = Path.(opam_doc / rel_path) in
      let dst_file = Path.(prep_doc / rel_path) in
      link_file src_file dst_file
    ) installed_docs;
  (* Ensure directories exist even when the package installed no files. *)
  Os.mkdir ~parents:true prep_lib;
  Os.mkdir ~parents:true prep_doc;
  prep_base
(** Generate shell command to run odoc_driver_voodoo for the target package.
    The odoc_bin and odoc_md_bin paths point to the specific binaries from the
    doc tool layers, avoiding conflicts with any odoc installed in the build layer.
    [universe] is currently unused. NOTE: the argument list passed to
    Printf.sprintf must stay in step with the eleven %s placeholders in the
    template below. *)
let odoc_driver_voodoo_command ~pkg ~universe:_ ~blessed ~actions ~odoc_bin ~odoc_md_bin =
  let pkg_name = OpamPackage.name_to_string pkg in
  (* Empty string when not blessed, so the flag simply disappears from the
     command line. *)
  let blessed_flag = if blessed then "--blessed" else "" in
  Printf.sprintf {|
set -ex
cd /workdir

echo "=== ODOC_DRIVER_VOODOO for %s ==="
echo "Actions: %s, Blessed: %s"
echo "Using odoc: %s"
echo "Using odoc-md: %s"

echo "Prep structure:"
find prep -type f | head -50

echo "=== Running odoc_driver_voodoo for %s ==="
odoc_driver_voodoo %s --odoc-dir /home/opam/compile --html-dir /html --actions %s -j 1 -v %s --odoc %s --odoc-md %s
|} pkg_name actions (if blessed then "true" else "false") odoc_bin odoc_md_bin pkg_name pkg_name actions blessed_flag odoc_bin odoc_md_bin
(** Container paths for odoc_driver_voodoo.
    Note: compile output goes to /home/opam/compile inside the fs,
    which is captured by the overlay and ends up in layer/fs/. *)

(* Working directory inside the container; the generated driver script
   (see [odoc_driver_voodoo_command]) cd's here before running. *)
let container_workdir = "/workdir"

(* HTML output root inside the container — the shared bind-mounted directory
   described in the module header. *)
let container_html_output = "/html"
194194+
+30
day10/bin/opamh.ml
(* Concept from opamh *)

(* Package names that constitute an opam switch's "compiler" selection.
   [dump_state] below uses this list to decide which installed packages are
   recorded in [sel_compiler] when reconstructing a switch-state file. *)
let compiler_packages =
  List.map OpamPackage.Name.of_string
    [
      "base-bigarray";
      "base-domains";
      "base-effects";
      "base-nnp";
      "base-threads";
      "base-unix";
      (* add other archs *)
      "host-arch-x86";
      "host-system-other";
      "ocaml";
      "ocaml-base-compiler";
      "ocaml-compiler";
      "ocaml-config";
      "ocaml-options-vanilla";
    ]
(* Write an opam switch-selections file to [state_file], listing every
   directory entry of [packages_dir] that parses as a package as both
   installed and a root, and marking the subset named in [compiler_packages]
   as the switch's compiler selection. Unparseable entries are ignored. *)
let dump_state packages_dir state_file =
  let packages =
    Sys.readdir packages_dir
    |> Array.to_list
    |> List.filter_map OpamPackage.of_string_opt
  in
  let is_compiler p = List.mem (OpamPackage.name p) compiler_packages in
  let installed = OpamPackage.Set.of_list packages in
  let selections =
    {
      OpamTypes.sel_installed = installed;
      sel_roots = installed;
      sel_pinned = OpamPackage.Set.empty;
      sel_compiler = OpamPackage.Set.of_list (List.filter is_compiler packages);
    }
  in
  OpamFilename.write (OpamFilename.raw state_file) (OpamFile.SwitchSelections.write_to_string selections)
+674
day10/bin/os.ml
(* Read the whole text file [filename] into a string. *)
let read_from_file filename = In_channel.with_open_text filename In_channel.input_all

(* Overwrite [filename] with [str] (text mode; created if missing). *)
let write_to_file filename str =
  Out_channel.with_open_text filename (fun oc -> Out_channel.output_string oc str)

(* Append [str] to [filename], creating it with mode 0o644 if missing. *)
let append_to_file filename str =
  Out_channel.with_open_gen [ Open_text; Open_append; Open_creat ] 0o644 filename
    (fun oc -> Out_channel.output_string oc str)
(* Per-PID logging *)
(* Directory holding one log file per worker PID; [None] disables logging. *)
let log_dir = ref None

(* Enable per-PID logging under [dir], creating the directory (0o755) when it
   does not exist. Creation failures are swallowed: logging is best-effort. *)
let set_log_dir dir =
  log_dir := Some dir;
  match Sys.file_exists dir with
  | true -> ()
  | false -> ( try Sys.mkdir dir 0o755 with _ -> ())
(* Printf-style logger. When logging has been enabled via [set_log_dir],
   appends a "[HH:MM:SS.mmm] message" line to <dir>/<pid>.log; otherwise the
   formatted message is discarded. *)
let log fmt =
  let emit msg =
    match !log_dir with
    | None -> () (* logging disabled *)
    | Some dir ->
        let now = Unix.gettimeofday () in
        let tm = Unix.localtime now in
        let stamp =
          Printf.sprintf "%02d:%02d:%02d.%03d"
            tm.Unix.tm_hour tm.Unix.tm_min tm.Unix.tm_sec
            (int_of_float ((now -. floor now) *. 1000.))
        in
        let file = Filename.concat dir (Printf.sprintf "%d.log" (Unix.getpid ())) in
        append_to_file file (Printf.sprintf "[%s] %s\n" stamp msg)
  in
  Printf.ksprintf emit fmt
(* Run [cmd] (argv list) under sudo through the shell, optionally redirecting
   stdout/stderr to the named files. Returns the shell's exit code. *)
let sudo ?stdout ?stderr cmd =
  let quoted = Filename.quote_command ?stdout ?stderr "sudo" cmd in
  Sys.command quoted
(* Run [cmd] through the shell, where the first element is the program and the
   rest are its arguments; optional stdout/stderr redirect to files.
   Returns the exit code. *)
let exec ?stdout ?stderr cmd =
  let prog = List.hd cmd and args = List.tl cmd in
  Sys.command (Filename.quote_command ?stdout ?stderr prog args)
(* Run [exec cmd], retrying on a non-zero exit code with a short random sleep
   between attempts. Returns 0 on the first success, or the exit code of the
   final attempt after [tries] retries have been exhausted. *)
let retry_exec ?stdout ?stderr ?(tries = 10) cmd =
  let rec attempt remaining =
    let code = exec ?stdout ?stderr cmd in
    if code = 0 then 0
    else if remaining = 0 then code
    else begin
      OpamConsole.note "retry %i: %s" (tries - remaining + 1) (String.concat " " cmd);
      Unix.sleepf (Random.float 2.0);
      attempt (remaining - 1)
    end
  in
  attempt tries
(* Rename [src] to [dst], retrying only on EACCES (NOTE(review): presumably
   transient contention on the target from another process — confirm). Sleeps
   with quadratic backoff plus jitter between attempts; once [tries] attempts
   are used up the EACCES is re-raised. Any other error propagates at once. *)
let retry_rename ?(tries = 10) src dst =
  let rec loop n =
    try Unix.rename src dst with
    | Unix.Unix_error (Unix.EACCES, x, y) ->
        (* d counts attempts made so far: 1, 2, ... tries. *)
        let d = tries - n + 1 in
        OpamConsole.note "retry_rename %i: %s -> %s" d src dst;
        Unix.sleep ((d * d) + Random.int d);
        (* On the final attempt re-raise the original error instead of looping. *)
        if n = 1 then raise (Unix.Unix_error (Unix.EACCES, x, y)) else loop (n - 1)
  in
  loop tries
(* Run shell command [cmd] and return its full standard output as a string.
   Fix: the channel is closed with [Unix.close_process_in] (which waits for
   the child) rather than [In_channel.close], which only closed the pipe and
   left the child process unreaped (a zombie). The command's exit status is
   still ignored, as before. *)
let run cmd =
  let inp = Unix.open_process_in cmd in
  let output = In_channel.input_all inp in
  ignore (Unix.close_process_in inp : Unix.process_status);
  output
(* Number of available processors, as reported by the external nproc(1). *)
let nproc () = int_of_string (String.trim (run "nproc"))
(* Create directory [dir] with mode 0o755 unless it already exists.
   With [~parents:true], missing ancestors are created first (like mkdir -p).
   Not race-safe: Sys.mkdir may still raise if a concurrent process creates
   the directory between the existence check and the call. *)
let rec mkdir ?(parents = false) dir =
  if Sys.file_exists dir then ()
  else begin
    if parents then begin
      let parent = Filename.dirname dir in
      if parent <> dir then mkdir ~parents:true parent
    end;
    Sys.mkdir dir 0o755
  end
(** Create a unique temporary directory. Unlike Filename.temp_dir, the name
    embeds the PID ("<prefix><pid>-<6 hex digits><suffix>") so names stay
    unique across forked processes even with shared RNG state. Retries on
    EEXIST up to 100 times, then raises [Sys_error]. *)
let temp_dir ?(perms = 0o700) ~parent_dir prefix suffix =
  let pid = Unix.getpid () in
  let rec attempt remaining =
    let candidate =
      Filename.concat parent_dir
        (Printf.sprintf "%s%d-%06x%s" prefix pid (Random.int 0xFFFFFF) suffix)
    in
    match Unix.mkdir candidate perms with
    | () -> candidate
    | exception Unix.Unix_error (Unix.EEXIST, _, _) ->
        if remaining > 0 then attempt (remaining - 1)
        else raise (Sys_error (candidate ^ ": File exists"))
  in
  attempt 100
(* Remove [path]. Non-directories are unlinked, retrying once after adding
   write bits on EACCES (NOTE(review): unlink permission normally depends on
   the parent directory, not the file's own mode — confirm this retry ever
   helps). Directories are removed with rmdir, first recursing into their
   contents when [recursive] is true. If the initial lstat raises ENOENT, a
   best-effort fallback via Sys primitives runs and all of its errors are
   swallowed, so a concurrently-deleted path is not an error. *)
let rec rm ?(recursive = false) path =
  try
    let stat = Unix.lstat path in
    match stat.st_kind with
    | S_REG
    | S_LNK
    | S_CHR
    | S_BLK
    | S_FIFO
    | S_SOCK -> (
        try Unix.unlink path with
        | Unix.Unix_error (Unix.EACCES, _, _) ->
            (* Add owner/group/other write bits, then retry the unlink once. *)
            Unix.chmod path (stat.st_perm lor 0o222);
            Unix.unlink path)
    | S_DIR ->
        if recursive then Sys.readdir path |> Array.iter (fun f -> rm ~recursive (Filename.concat path f));
        Unix.rmdir path
  with
  | Unix.Unix_error (Unix.ENOENT, _, _) -> (
      (* Path vanished between lstat and removal: try the Sys API, ignore
         any failure. *)
      try
        match Sys.is_directory path with
        | true -> Sys.rmdir path
        | false -> Sys.remove path
      with
      | _ -> ())
(** Remove a directory, using sudo if needed for root-owned files. *)
let sudo_rm_rf path =
  try rm ~recursive:true path with
  | Unix.Unix_error (Unix.EACCES, _, _)
  | Unix.Unix_error (Unix.EPERM, _, _) ->
      (* Files owned by root from container builds - use sudo.
         The sudo exit code is discarded: removal is best-effort. *)
      ignore (sudo [ "rm"; "-rf"; path ])
(** Safely rename a temp directory to a target directory.
    Handles ENOTEMPTY which can occur if:
    1. Another worker already completed the target (marker_file exists) - just clean up src
    2. A previous crashed run left a stale target (no marker_file) - delete target and retry

    [marker_file] is the path to check if the target is complete (e.g., layer.json).
    The retried rename after removing a stale target is not protected again;
    a second failure propagates to the caller. *)
let safe_rename_dir ~marker_file src dst =
  try Unix.rename src dst with
  | Unix.Unix_error (Unix.ENOTEMPTY, _, _)
  | Unix.Unix_error (Unix.EEXIST, _, _) ->
      let dst_basename = Filename.basename dst in
      if Sys.file_exists marker_file then begin
        (* Target already complete by another worker - clean up our temp dir *)
        log "Target already exists, cleaning up temp: %s" dst_basename;
        sudo_rm_rf src
      end else begin
        (* Stale target from crashed run - remove it and retry *)
        log "Removing stale target: %s" dst_basename;
        sudo_rm_rf dst;
        Unix.rename src dst
      end
module IntSet = Set.Make (Int)

(* Apply [f] to every element of [lst], each call in its own forked child
   process, keeping the number of in-flight workers bounded by [np]
   (default: [nproc ()]). Children always exit 0; results and exit statuses
   are not collected (see [fork_with_progress]/[fork_map] for that).
   Blocks until all children have been reaped.
   NOTE(review): the [<=] comparison lets one more child start once the set
   already holds [nproc] pids, i.e. up to nproc+1 concurrent workers —
   confirm whether that is intended. *)
let fork ?np f lst =
  let nproc = Option.value ~default:(nproc ()) np in
  List.fold_left
    (fun acc x ->
      (* Throttle: poll with WNOHANG, dropping reaped pids, until the running
         set is small enough; sleep briefly when nothing finished. *)
      let acc =
        let rec loop acc =
          if IntSet.cardinal acc <= nproc then acc
          else
            let running, finished =
              IntSet.partition
                (fun pid ->
                  let c, _ = Unix.waitpid [ WNOHANG ] pid in
                  pid <> c)
                acc
            in
            let () = if IntSet.is_empty finished then Unix.sleepf 0.1 in
            loop running
        in
        loop acc
      in
      match Unix.fork () with
      | 0 ->
          (* Reseed RNG after fork using PID to avoid temp directory collisions *)
          Random.init (Unix.getpid () lxor int_of_float (Unix.gettimeofday () *. 1000000.));
          f x;
          exit 0
      | child -> IntSet.add child acc)
    IntSet.empty lst
  |> IntSet.iter (fun pid -> ignore (Unix.waitpid [] pid))
(** Fork with progress callback. [on_complete status] is called each time a worker finishes.
    [status] is the exit code (0 = success, non-zero = failure; -1 when the
    child was killed or stopped by a signal). Exceptions raised by [f] in a
    child are printed and converted to exit code 1. Concurrency is bounded by
    [np] (default: [nproc ()]); NOTE(review): as in [fork], the [<=] check
    admits nproc+1 running children. *)
let fork_with_progress ?np ~on_complete f lst =
  let nproc = Option.value ~default:(nproc ()) np in
  (* Map a waitpid status to a plain int exit code. *)
  let status_of_wait = function
    | Unix.WEXITED c -> c
    | Unix.WSIGNALED _ | Unix.WSTOPPED _ -> -1
  in
  (* Try to reap finished processes, returning (still_running, exit_codes) *)
  let reap_finished pids =
    IntSet.fold (fun pid (running, codes) ->
        let c, status = Unix.waitpid [ WNOHANG ] pid in
        if c = pid then
          (running, status_of_wait status :: codes)
        else
          (IntSet.add pid running, codes)
      ) pids (IntSet.empty, [])
  in
  List.fold_left
    (fun acc x ->
      (* Throttle: reap (reporting each completion) until under the cap. *)
      let acc =
        let rec loop acc =
          if IntSet.cardinal acc <= nproc then acc
          else
            let running, codes = reap_finished acc in
            List.iter on_complete codes;
            let () = if codes = [] then Unix.sleepf 0.1 in
            loop running
        in
        loop acc
      in
      match Unix.fork () with
      | 0 ->
          (* Reseed RNG after fork using PID to avoid temp directory collisions *)
          Random.init (Unix.getpid () lxor int_of_float (Unix.gettimeofday () *. 1000000.));
          (try f x with exn ->
            Printf.eprintf "Worker exception: %s\n%!" (Printexc.to_string exn);
            exit 1);
          exit 0
      | child -> IntSet.add child acc)
    IntSet.empty lst
  |> fun remaining ->
  (* Wait for all remaining processes *)
  IntSet.iter (fun pid ->
      let _, status = Unix.waitpid [] pid in
      on_complete (status_of_wait status)
    ) remaining
(** Fork processes to run function on list items in parallel, collecting results.
    Each process writes its result to a temp file, parent collects after all complete.
    Returns list of (input, result option) pairs in original order.
    [serialize]/[deserialize] convert a result to/from the string stored in
    [temp_dir]/<index>. A worker that returns [None] — or dies before writing
    its file — yields [None] for its input. Concurrency is bounded by [np]
    (default: [nproc ()]); NOTE(review): as in [fork], [<=] admits nproc+1
    running children. *)
let fork_map ?np ~temp_dir ~serialize ~deserialize f lst =
  let nproc = Option.value ~default:(nproc ()) np in
  (* Pair each item with a stable index used as its result filename. *)
  let indexed = List.mapi (fun i x -> (i, x)) lst in
  (* Fork processes *)
  let pids = List.fold_left
    (fun acc (i, x) ->
      (* Throttle: poll with WNOHANG until the running set is small enough. *)
      let acc =
        let rec loop acc =
          if IntSet.cardinal acc <= nproc then acc
          else
            let running, finished =
              IntSet.partition
                (fun pid ->
                  let c, _ = Unix.waitpid [ WNOHANG ] pid in
                  pid <> c)
                acc
            in
            let () = if IntSet.is_empty finished then Unix.sleepf 0.1 in
            loop running
        in
        loop acc
      in
      match Unix.fork () with
      | 0 ->
          (* Reseed RNG after fork using PID to avoid temp directory collisions *)
          Random.init (Unix.getpid () lxor int_of_float (Unix.gettimeofday () *. 1000000.));
          let result = f x in
          let result_file = Filename.concat temp_dir (string_of_int i) in
          (match result with
           | Some r -> write_to_file result_file (serialize r)
           | None -> ());
          exit 0
      | child -> IntSet.add child acc)
    IntSet.empty indexed
  in
  IntSet.iter (fun pid -> ignore (Unix.waitpid [] pid)) pids;
  (* Collect results: a missing file means the worker produced no result. *)
  List.map (fun (i, x) ->
      let result_file = Filename.concat temp_dir (string_of_int i) in
      let result =
        if Sys.file_exists result_file then
          Some (deserialize (read_from_file result_file))
        else
          None
      in
      (x, result)
    ) indexed
(** Lock info for tracking active builds/docs/tools.
    When provided, locks are created in a central directory with descriptive names. *)
type lock_info = {
  cache_dir : string;
  stage : [`Build | `Doc | `Tool];
  package : string;
  version : string;
  universe : string option; (* For Build/Doc: dependency hash. For Tool: OCaml version if applicable *)
  layer_name : string option; (* The final layer directory name, for finding logs after completion *)
}

(** Build the lock-file name for [info]:
    build/doc: "<stage>-<pkg>.<ver>[-<universe>].lock"
    tool:      "tool-<pkg>[-<ocaml-version>].lock" *)
let lock_filename info =
  let stage_tag =
    match info.stage with
    | `Build -> "build"
    | `Doc -> "doc"
    | `Tool -> "tool"
  in
  let stem =
    match info.stage, info.universe with
    | (`Build | `Doc), Some u -> Printf.sprintf "%s-%s.%s-%s" stage_tag info.package info.version u
    | (`Build | `Doc), None -> Printf.sprintf "%s-%s.%s" stage_tag info.package info.version
    | `Tool, Some ocaml_ver -> Printf.sprintf "%s-%s-%s" stage_tag info.package ocaml_ver
    | `Tool, None -> Printf.sprintf "%s-%s" stage_tag info.package
  in
  stem ^ ".lock"
(** Return the central lock directory under [cache_dir], creating it (0o755)
    on first use. A concurrent creation by another worker (EEXIST) is
    harmless and ignored. *)
let locks_dir cache_dir =
  let dir = Path.(cache_dir / "locks") in
  (match Unix.mkdir dir 0o755 with
   | () -> ()
   | exception Unix.Unix_error (Unix.EEXIST, _, _) -> ());
  dir
(* Run [write_function dir_name] at most once across concurrently racing
   workers. An fcntl lock file serialises workers: whoever holds the lock
   checks [marker_file] (or [dir_name] itself when no marker is given) and
   only invokes [write_function] when the work is not already complete.
   [write_function] receives [~set_temp_log_path] so it can publish its live
   log location into the lock file for external monitoring. *)
let create_directory_exclusively ?marker_file ?lock_info dir_name write_function =
  (* Determine lock file location based on whether lock_info is provided *)
  let lock_file = match lock_info with
    | Some info -> Path.(locks_dir info.cache_dir / lock_filename info)
    | None -> dir_name ^ ".lock"
  in
  let lock_fd = Unix.openfile lock_file [ O_CREAT; O_RDWR ] 0o644 in
  let dir_basename = Filename.basename dir_name in
  (* Try non-blocking lock first to detect contention *)
  let got_lock_immediately =
    try Unix.lockf lock_fd F_TLOCK 0; true with
    | Unix.Unix_error (Unix.EAGAIN, _, _)
    | Unix.Unix_error (Unix.EACCES, _, _) -> false
    | Unix.Unix_error (Unix.EINTR, _, _) -> false
  in
  if not got_lock_immediately then begin
    log "Waiting for lock: %s" dir_basename;
    (* Retry lockf on EINTR (interrupted by signal) *)
    let rec lock_with_retry () =
      try Unix.lockf lock_fd F_LOCK 0 with
      | Unix.Unix_error (Unix.EINTR, _, _) -> lock_with_retry ()
    in
    lock_with_retry ();
    log "Acquired lock: %s" dir_basename
  end;
  (* Write lock metadata for monitoring:
     Line 1: PID
     Line 2: start time
     Line 3: layer name (for finding logs after completion)
     Line 4: temp log path (updated by write_function for live logs) *)
  let layer_name = match lock_info with
    | Some info -> Option.value ~default:"" info.layer_name
    | None -> ""
  in
  let write_metadata ?temp_log_path () =
    match lock_info with
    | Some _ ->
        let temp_log = Option.value ~default:"" temp_log_path in
        let metadata = Printf.sprintf "%d\n%.0f\n%s\n%s\n" (Unix.getpid ()) (Unix.time ()) layer_name temp_log in
        (* Rewrite the whole file from the start so stale metadata never
           lingers past the new content. *)
        ignore (Unix.lseek lock_fd 0 Unix.SEEK_SET);
        ignore (Unix.ftruncate lock_fd 0);
        ignore (Unix.write_substring lock_fd metadata 0 (String.length metadata))
    | None -> ()
  in
  write_metadata ();
  (* Callback for write_function to update the temp log path for live viewing *)
  let set_temp_log_path path = write_metadata ~temp_log_path:path () in
  (* Check marker_file if provided, otherwise check directory existence *)
  let already_complete = match marker_file with
    | Some f -> Sys.file_exists f
    | None -> Sys.file_exists dir_name
  in
  if not already_complete then begin
    log "Building: %s" dir_basename;
    write_function ~set_temp_log_path dir_name;
    log "Completed: %s" dir_basename
  end;
  (* Closing the fd releases the lockf lock. *)
  Unix.close lock_fd;
  (* Only delete lock file if no lock_info (old behavior) -
     with lock_info, we keep the file for stale cleanup later *)
  (match lock_info with
   | None -> (try Unix.unlink lock_file with _ -> ())
   | Some _ -> ())
exception Copy_error of string

(* Copy regular file [src] to [dst] in [buffer_size]-byte chunks.
   Raises [Copy_error] if the source cannot be stat'ed/opened, is not a
   regular file, the destination cannot be opened, or an IO error occurs
   mid-copy. Permission bits and atime/mtime are preserved by default;
   failures there only print a warning.
   Fix: the original re-entered [try] on every chunk, so the exception-handler
   stack grew linearly with file size; the handler is now outside a
   tail-recursive loop and the stack stays constant. *)
let cp ?(buffer_size = 65536) ?(preserve_permissions = true) ?(preserve_times = true) src dst =
  let safe_close fd =
    try Unix.close fd with
    | _ -> ()
  in
  let src_stats =
    try Unix.stat src with
    | Unix.Unix_error (err, _, _) -> raise (Copy_error (Printf.sprintf "Cannot stat source file '%s': %s" src (Unix.error_message err)))
  in
  if src_stats.st_kind <> S_REG then raise (Copy_error (Printf.sprintf "Source '%s' is not a regular file" src));
  let src_fd =
    try Unix.openfile src [ O_RDONLY ] 0 with
    | Unix.Unix_error (err, _, _) -> raise (Copy_error (Printf.sprintf "Cannot open source file '%s': %s" src (Unix.error_message err)))
  in
  let dst_fd =
    try Unix.openfile dst [ O_WRONLY; O_CREAT; O_TRUNC ] src_stats.st_perm with
    | Unix.Unix_error (err, _, _) ->
        safe_close src_fd;
        raise (Copy_error (Printf.sprintf "Cannot open destination file '%s': %s" dst (Unix.error_message err)))
  in
  let buffer = Bytes.create buffer_size in
  (* Tail-recursive chunk loop; a short write is completed by [write_all]. *)
  let rec copy_loop () =
    match Unix.read src_fd buffer 0 buffer_size with
    | 0 -> ()
    | bytes_read ->
        let rec write_all pos remaining =
          if remaining > 0 then
            let bytes_written = Unix.write dst_fd buffer pos remaining in
            write_all (pos + bytes_written) (remaining - bytes_written)
        in
        write_all 0 bytes_read;
        copy_loop ()
  in
  (* Single handler around the whole loop: close both fds, then wrap the
     failure in Copy_error exactly as before. *)
  (try copy_loop () with
   | Unix.Unix_error (err, _, _) ->
       safe_close src_fd;
       safe_close dst_fd;
       raise (Copy_error (Printf.sprintf "Error during copy: %s" (Unix.error_message err))));
  safe_close src_fd;
  safe_close dst_fd;
  (if preserve_permissions then
     try Unix.chmod dst src_stats.st_perm with
     | Unix.Unix_error (err, _, _) -> Printf.eprintf "Warning: Could not preserve permissions: %s\n" (Unix.error_message err));
  if preserve_times then
    try Unix.utimes dst src_stats.st_atime src_stats.st_mtime with
    | Unix.Unix_error (err, _, _) -> Printf.eprintf "Warning: Could not preserve timestamps: %s\n" (Unix.error_message err)
(** [hardlink_tree ~source ~target] recursively mirrors [source] into [target]
    (which must already exist): regular files become hard links, symlinks are
    recreated with the same link text, directories are created via [mkdir]
    (project helper) and recursed into. Existing targets are left untouched;
    special files (devices, fifos, sockets) are skipped. If the hard-link
    limit is reached (EMLINK) the file is copied with [cp] instead; any other
    Unix error only prints a warning for that entry. *)
let hardlink_tree ~source ~target =
  let rec process_directory current_source current_target =
    let entries = Sys.readdir current_source in
    Array.iter
      (fun entry ->
        let source = Filename.concat current_source entry in
        let target = Filename.concat current_target entry in
        try
          let stat = Unix.lstat source in
          match stat.st_kind with
          (* NOTE(review): Sys.file_exists follows symlinks, so a dangling
             target symlink reads as absent here and the symlink/link call
             below fails with EEXIST (reported as a warning). *)
          | S_LNK -> if not (Sys.file_exists target) then Unix.symlink (Unix.readlink source) target
          | S_REG -> if not (Sys.file_exists target) then Unix.link source target
          | S_DIR ->
              mkdir target;
              process_directory source target
          | S_CHR
          | S_BLK
          | S_FIFO
          | S_SOCK ->
              ()
        with
        | Unix.Unix_error (Unix.EMLINK, _, _) -> cp source target
        | Unix.Unix_error (err, _, _) -> Printf.eprintf "Warning: %s -> %s = %s\n" source target (Unix.error_message err))
      entries
  in
  process_directory source target
(** [clense_tree ~source ~target] removes from [target] everything mirrored
    from [source]: symlinks whose link text matches, regular files with an
    identical mtime (hard links share mtimes, so this undoes a previous
    [hardlink_tree]/[copy_tree]), and directories that end up empty.
    All errors are reported as warnings; nothing is raised. *)
let clense_tree ~source ~target =
  let rec process_directory current_source current_target =
    let entries = Sys.readdir current_source in
    Array.iter
      (fun entry ->
        let source = Filename.concat current_source entry in
        let target = Filename.concat current_target entry in
        try
          let src_stat = Unix.lstat source in
          match src_stat.st_kind with
          | Unix.S_LNK -> if Sys.file_exists target then if Unix.readlink source = Unix.readlink target then Unix.unlink target
          | Unix.S_REG ->
              (* Equal mtime is taken as "same content came from source". *)
              if Sys.file_exists target then
                let tgt_stat = Unix.lstat target in
                if src_stat.st_mtime = tgt_stat.st_mtime then (
                  try Unix.unlink target with
                  | Unix.Unix_error (Unix.EACCES, _, _) ->
                      (* Read-only entry: add write bits, then retry once. *)
                      Unix.chmod target (src_stat.st_perm lor 0o222);
                      Unix.unlink target)
          | Unix.S_DIR -> (
              process_directory source target;
              (* Remove the directory only if the cleanup left it empty. *)
              try
                if Sys.file_exists target then
                  let target_entries = Sys.readdir target in
                  if Array.length target_entries = 0 then Unix.rmdir target
              with
              | Unix.Unix_error (err, _, _) -> Printf.eprintf "Warning: rmdir %s = %s\n" target (Unix.error_message err))
          | S_CHR
          | S_BLK
          | S_FIFO
          | S_SOCK ->
              ()
        with
        | Unix.Unix_error (err, _, _) -> Printf.eprintf "Warning: unlink %s = %s\n" target (Unix.error_message err))
      entries
  in
  process_directory source target
(** [copy_tree ~source ~target] recursively copies [source] into [target]
    (which must already exist): symlinks are recreated, regular files are
    copied with [cp], directories are created via [mkdir] and recursed into.
    Existing targets are left untouched; special files are skipped.
    If [cp] fails a hard link is attempted as a fallback; any other Unix
    error only prints a warning for that entry. *)
let copy_tree ~source ~target =
  let rec process_directory current_source current_target =
    let entries = Sys.readdir current_source in
    Array.iter
      (fun entry ->
        let source = Filename.concat current_source entry in
        let target = Filename.concat current_target entry in
        try
          let stat = Unix.lstat source in
          match stat.st_kind with
          | S_LNK -> if not (Sys.file_exists target) then Unix.symlink (Unix.readlink source) target
          | S_REG -> if not (Sys.file_exists target) then cp source target
          | S_DIR ->
              mkdir target;
              process_directory source target
          | S_CHR
          | S_BLK
          | S_FIFO
          | S_SOCK ->
              ()
        with
        | Copy_error _ ->
            (* cp failed; fall back to a hard link (may itself raise,
               escaping this entry's handler). *)
            Printf.eprintf "Warning: hard linking %s -> %s\n" source target;
            Unix.link source target
        | Unix.Unix_error (err, _, _) -> Printf.eprintf "Warning: %s -> %s = %s\n" source target (Unix.error_message err))
      entries
  in
  process_directory source target
(** [ls ?extn dir] lists the entries of [dir] as paths prefixed with [dir].
    When [extn] is given only entries with that extension are kept (a leading
    dot is added when missing). Returns [[]] if [dir] cannot be read. *)
let ls ?extn dir =
  match Sys.readdir dir with
  | exception Sys_error _ -> []
  | names -> (
      let paths = Array.fold_right (fun n acc -> Filename.concat dir n :: acc) names [] in
      match extn with
      | None -> paths
      | Some e ->
          let suffix = if e <> "" && e.[0] = '.' then e else "." ^ e in
          List.filter (fun p -> Filename.check_suffix p suffix) paths)
527527+528528+(** Atomic directory swap for graceful degradation.
529529+530530+ This module provides atomic swap operations for documentation directories,
531531+ implementing the "fresh docs with graceful degradation" pattern:
532532+ - Write new docs to a staging directory ({dir}.new)
533533+ - On success, atomically swap: old -> .old, new -> current, remove .old
534534+ - On failure, leave original docs untouched
535535+536536+ Recovery: On startup, clean up any stale .new or .old directories left
537537+ from interrupted swaps. *)
module Atomic_swap = struct
  (* Remove stale "{version}.new" (incomplete write) and "{version}.old"
     (incomplete swap) entries inside [pkg_dir]. Best-effort: any error
     silently aborts the remaining entries, as in the original per-package
     try blocks. *)
  let cleanup_version_dirs pkg_dir =
    try
      Sys.readdir pkg_dir
      |> Array.iter (fun version_dir ->
             let path = Filename.concat pkg_dir version_dir in
             if Filename.check_suffix version_dir ".new" then begin
               log "Cleaning up stale .new directory: %s" path;
               sudo_rm_rf path
             end
             else if Filename.check_suffix version_dir ".old" then begin
               log "Cleaning up stale .old directory: %s" path;
               sudo_rm_rf path
             end)
    with _ -> ()

  (* Apply [f] to every subdirectory of [dir]; a missing or unreadable [dir]
     is ignored. *)
  let iter_subdirs dir f =
    if Sys.file_exists dir && Sys.is_directory dir then
      try
        Sys.readdir dir
        |> Array.iter (fun name ->
               let path = Filename.concat dir name in
               if Sys.is_directory path then f path)
      with _ -> ()

  (** Clean up stale .new and .old directories from interrupted swaps.
      Call this on startup before processing packages. *)
  let cleanup_stale_dirs ~html_dir =
    (* Blessed docs live at {html_dir}/p/{pkg}/{version}[.new|.old] *)
    iter_subdirs (Filename.concat html_dir "p") cleanup_version_dirs;
    (* Universe docs live at {html_dir}/u/{universe}/{pkg}/{version}[.new|.old] *)
    iter_subdirs (Filename.concat html_dir "u") (fun universe_dir ->
        iter_subdirs universe_dir cleanup_version_dirs)

  (** Get paths for atomic swap operations.
      Returns (staging_dir, final_dir, old_dir) where:
      - staging_dir: {version}.new - where new docs are written
      - final_dir: {version} - the live docs location
      - old_dir: {version}.old - backup during swap *)
  let get_swap_paths ~html_dir ~pkg ~version ~blessed ~universe =
    let base_dir =
      if blessed then
        Filename.concat (Filename.concat html_dir "p") pkg
      else
        Filename.concat (Filename.concat (Filename.concat html_dir "u") universe) pkg
    in
    let final_dir = Filename.concat base_dir version in
    (final_dir ^ ".new", final_dir, final_dir ^ ".old")

  (** Prepare staging directory for a package: (re)create the .new directory
      for doc generation, discarding any leftovers from a failed previous
      attempt. Returns the staging path. *)
  let prepare_staging ~html_dir ~pkg ~version ~blessed ~universe =
    let staging_dir, _, _ = get_swap_paths ~html_dir ~pkg ~version ~blessed ~universe in
    if Sys.file_exists staging_dir then sudo_rm_rf staging_dir;
    mkdir ~parents:true staging_dir;
    staging_dir

  (** Commit staging to final location atomically.
      Performs the swap: final -> .old, staging -> final, remove .old.
      Returns true on success, false on failure (previously this raised an
      uncaught [Exit] on rename failure, contradicting the documented
      contract). A failed second rename restores the original docs from
      the .old backup. *)
  let commit ~html_dir ~pkg ~version ~blessed ~universe =
    let staging_dir, final_dir, old_dir = get_swap_paths ~html_dir ~pkg ~version ~blessed ~universe in
    if not (Sys.file_exists staging_dir) then begin
      log "commit: staging directory does not exist: %s" staging_dir;
      false
    end
    else begin
      log "commit: swapping %s -> %s" staging_dir final_dir;
      let has_existing = Sys.file_exists final_dir in
      (* Step 1: if final exists, move it aside to .old. *)
      let moved_aside =
        if not has_existing then true
        else begin
          (* Remove any stale .old first. *)
          if Sys.file_exists old_dir then sudo_rm_rf old_dir;
          try
            Unix.rename final_dir old_dir;
            true
          with Unix.Unix_error (err, _, _) ->
            log "commit: failed to rename %s to %s: %s" final_dir old_dir (Unix.error_message err);
            false
        end
      in
      moved_aside
      &&
      (* Step 2: move staging into place. *)
      match Unix.rename staging_dir final_dir with
      | () ->
          (* Step 3: drop the .old backup. *)
          if has_existing && Sys.file_exists old_dir then sudo_rm_rf old_dir;
          log "commit: successfully swapped docs for %s/%s" pkg version;
          true
      | exception Unix.Unix_error (err, _, _) ->
          log "commit: failed to rename %s to %s: %s" staging_dir final_dir (Unix.error_message err);
          (* Try to restore the original docs if we moved them aside. *)
          (if has_existing && Sys.file_exists old_dir then
             try Unix.rename old_dir final_dir with _ -> ());
          false
    end

  (** Rollback staging on failure.
      Removes the .new directory, leaving original docs intact. *)
  let rollback ~html_dir ~pkg ~version ~blessed ~universe =
    let staging_dir, _, _ = get_swap_paths ~html_dir ~pkg ~version ~blessed ~universe in
    if Sys.file_exists staging_dir then begin
      log "rollback: removing staging directory %s" staging_dir;
      sudo_rm_rf staging_dir
    end
end
(** Documentation generation phase. Split so packages with post dependencies
    can run compile before their post deps are built, then link afterwards. *)
type doc_phase =
  | Doc_all  (** Run compile, link, and html-generate together *)
  | Doc_compile_only  (** Run only compile phase (for packages with post deps) *)
  | Doc_link_only  (** Run only link and html-generate (after post deps built) *)
module type CONTAINER = sig
  type t
  (** Opaque handle to a build container/sandbox instance. *)

  val init : config:Config.t -> t
  (** Create a container from the given configuration. *)

  val deinit : t:t -> unit
  (** Tear the container down and release its resources. *)

  val config : t:t -> Config.t
  (** The configuration this container was initialised with. *)

  val run : t:t -> temp_dir:string -> string -> string -> int
  (** Run a step inside the container, returning an exit status.
      NOTE(review): the two positional strings are undocumented here —
      confirm their meaning against an implementation. *)

  val build : t:t -> temp_dir:string -> string -> OpamPackage.t -> string list -> int
  (** Build a package inside the container, returning an exit status.
      NOTE(review): the positional string and string list are undocumented
      here — confirm against an implementation. *)

  val layer_hash : t:t -> OpamPackage.t list -> string
  (** Hash identifying the layer for the given package list. *)

  (** Compute hash for a doc layer.
      The doc hash depends on the build hash, dependency doc hashes,
      driver layer hash, odoc layer hash, and blessing status. *)
  val doc_layer_hash :
    t:t ->
    build_hash:string ->
    dep_doc_hashes:string list ->
    ocaml_version:OpamPackage.t ->
    blessed:bool ->
    string

  (** Documentation generation support.
      [phase] controls which phases to run:
      - [Doc_all]: Run all phases (compile, link, html) - for packages without post deps
      - [Doc_compile_only]: Run only compile - for packages with post deps
      - [Doc_link_only]: Run only link and html - after post deps are built

      [build_layer_dir] is the build layer (for .cmti files via prep).
      [doc_layer_dir] is the doc layer (for compile output and prep structure).
      [dep_doc_hashes] are the doc layer hashes of dependencies.
      [ocaml_version] is the OCaml compiler package from the solution.
      Returns None if doc generation is not supported on this platform. *)
  val generate_docs :
    t:t ->
    build_layer_dir:string ->
    doc_layer_dir:string ->
    dep_doc_hashes:string list ->
    pkg:OpamPackage.t ->
    installed_libs:string list ->
    installed_docs:string list ->
    phase:doc_phase ->
    ocaml_version:OpamPackage.t ->
    Yojson.Safe.t option

  (** Compute hash for a jtw layer.
      Depends on the build hash and jtw-tools layer hash. *)
  val jtw_layer_hash :
    t:t ->
    build_hash:string ->
    ocaml_version:OpamPackage.t ->
    string

  (** JTW generation: compile .cma to .cma.js, extract .cmi, META, generate dynamic_cmis.json.
      [build_layer_dir] is the build layer (for .cma/.cmi files).
      [jtw_layer_dir] is the output jtw layer.
      [dep_build_hashes] are the build layer hashes of dependencies.
      [installed_libs] are files installed by this package.
      Returns Some json on success/failure, None if not supported. *)
  val generate_jtw :
    t:t ->
    build_layer_dir:string ->
    jtw_layer_dir:string ->
    dep_build_hashes:string list ->
    pkg:OpamPackage.t ->
    installed_libs:string list ->
    ocaml_version:OpamPackage.t ->
    Yojson.Safe.t option
end
+181
day10/bin/sync_docs.ml
···11+(** Documentation sync functionality.
22+33+ Scans the cache for layers with successful documentation and rsyncs
44+ the HTML to a destination (local path, SSH, or rsync server). *)
(** One successfully-documented package found in the cache. *)
type doc_entry = {
  pkg : OpamPackage.t;  (** Package name.version *)
  html_path : string;  (** Path to the generated HTML in the cache *)
  universe : string;  (** Universe hash extracted from [html_path]; "unknown" if absent *)
  blessed : bool;  (** True when these are the canonical docs for the package *)
}
(** Extract the universe hash from an html_path of the form
    .../prep/universes/{universe}/{pkg}/{version}/html.
    Returns [None] when no "universes" segment with a following component
    is present. *)
let extract_universe html_path =
  let rec scan = function
    | [] | [ _ ] -> None
    | "universes" :: universe :: _ -> Some universe
    | _ :: tl -> scan tl
  in
  scan (String.split_on_char '/' html_path)
(** Parse a layer.json file and return a [doc_entry] when the layer records a
    successful documentation build; [None] otherwise (including any parse or
    schema error). *)
let parse_layer_json path =
  try
    let open Yojson.Safe.Util in
    let json = Yojson.Safe.from_file path in
    let pkg = OpamPackage.of_string (json |> member "package" |> to_string) in
    match json |> member "doc" with
    | `Null -> None
    | doc when doc |> member "status" |> to_string <> "success" -> None
    | doc ->
        let html_path = doc |> member "html_path" |> to_string in
        let universe = Option.value (extract_universe html_path) ~default:"unknown" in
        let blessed = doc |> member "blessed" |> to_bool in
        Some { pkg; html_path; universe; blessed }
  with _ -> None
(** True iff [name] is a doc layer directory: "doc-{hash}", excluding the
    special "doc-driver-*" and "doc-odoc-*" layers. *)
let is_doc_layer_dir name =
  (* [starts p s] mirrors the original strict length check: [s] must be
     strictly longer than [p] and begin with it. *)
  let starts p s =
    let lp = String.length p in
    String.length s > lp && String.sub s 0 lp = p
  in
  starts "doc-" name && not (starts "doc-driver-" name) && not (starts "doc-odoc-" name)
(** Scan the cache for all doc layers ({cache_dir}/{os_key}/doc-*) that have a
    layer.json recording a successful documentation build. *)
let scan_cache ~cache_dir ~os_key =
  let cache_path = Path.(cache_dir / os_key) in
  if not (Sys.file_exists cache_path) then []
  else
    Sys.readdir cache_path
    |> Array.to_list
    |> List.filter is_doc_layer_dir
    |> List.filter_map (fun entry ->
           let layer_json = Path.(cache_path / entry / "layer.json") in
           if Sys.file_exists layer_json then parse_layer_json layer_json else None)
(** Relative destination path for a doc entry: "{name}/{version}/" for
    blessed docs, "universes/{universe}/{name}/{version}/" otherwise. *)
let destination_path ~entry =
  let name = OpamPackage.name_to_string entry.pkg
  and version = OpamPackage.version_to_string entry.pkg in
  match entry.blessed with
  | true -> Printf.sprintf "%s/%s/" name version
  | false -> Printf.sprintf "universes/%s/%s/%s/" entry.universe name version
(** Run rsync to sync [src]/ to [dst], returning true on exit code 0.
    Both paths are shell-quoted with [Filename.quote] so that spaces or
    shell metacharacters in package/cache paths can neither break the
    command line nor inject commands (the original interpolated them
    unquoted). In [dry_run] mode no directories are created and rsync is
    invoked with --dry-run. *)
let rsync ~src ~dst ~dry_run =
  (* Create the destination's parent directory first. *)
  let dst_dir = Filename.dirname dst in
  if not dry_run then begin
    let mkdir_cmd = Printf.sprintf "mkdir -p %s" (Filename.quote dst_dir) in
    ignore (Sys.command mkdir_cmd)
  end;
  let args =
    [
      "rsync";
      "-av";
      "--delete";
    ]
    @ (if dry_run then [ "--dry-run" ] else [])
    @ [ Filename.quote (src ^ "/"); Filename.quote dst ]
  in
  let cmd = String.concat " " args in
  if dry_run then Printf.printf "Would run: %s\n%!" cmd;
  let exit_code = Sys.command cmd in
  exit_code = 0
(** Sync all documentation found in the cache to [destination].
    [blessed_only] restricts to canonical docs; [package_filter] restricts to
    one package name. Prints per-entry progress and a final summary; returns
    true iff every selected entry synced successfully. *)
let sync ~cache_dir ~os_key ~destination ~dry_run ~blessed_only ~package_filter =
  let entries = scan_cache ~cache_dir ~os_key in
  (* Apply the blessed-only filter first, then the name filter. *)
  let entries =
    if blessed_only then List.filter (fun e -> e.blessed) entries
    else entries
  in
  let entries =
    match package_filter with
    | None -> entries
    | Some pkg_name ->
        List.filter (fun e -> OpamPackage.name_to_string e.pkg = pkg_name) entries
  in
  Printf.printf "Found %d documentation entries to sync\n%!" (List.length entries);
  let synced = ref 0 in
  let failed = ref 0 in
  List.iter
    (fun entry ->
      let src = entry.html_path in
      let dst_path = destination_path ~entry in
      let dst = destination ^ "/" ^ dst_path in
      (* A missing source (e.g. cache pruned since scan) counts as a failure. *)
      if Sys.file_exists src then begin
        Printf.printf "Syncing %s -> %s\n%!" (OpamPackage.to_string entry.pkg) dst_path;
        if rsync ~src ~dst ~dry_run then
          incr synced
        else begin
          Printf.eprintf "Failed to sync %s\n%!" (OpamPackage.to_string entry.pkg);
          incr failed
        end
      end
      else begin
        Printf.eprintf "HTML path does not exist: %s\n%!" src;
        incr failed
      end)
    entries;
  Printf.printf "Synced: %d, Failed: %d\n%!" !synced !failed;
  !failed = 0
(** Generate an index.html at [destination] listing every blessed package in
    the cache, sorted by package. In [dry_run] mode nothing is written.
    Returns true on success (or dry run), false if the write fails. *)
let generate_index ~cache_dir ~os_key ~destination ~dry_run =
  let entries = scan_cache ~cache_dir ~os_key in
  (* Only blessed (canonical) docs appear in the top-level index. *)
  let blessed = List.filter (fun e -> e.blessed) entries in
  let index_content =
    let buf = Buffer.create 4096 in
    Buffer.add_string buf "<!DOCTYPE html>\n<html>\n<head>\n";
    Buffer.add_string buf "  <title>OCaml Package Documentation</title>\n";
    Buffer.add_string buf "  <style>\n";
    Buffer.add_string buf "    body { font-family: sans-serif; max-width: 800px; margin: 0 auto; padding: 20px; }\n";
    Buffer.add_string buf "    ul { list-style: none; padding: 0; }\n";
    Buffer.add_string buf "    li { padding: 5px 0; }\n";
    Buffer.add_string buf "    a { color: #0066cc; text-decoration: none; }\n";
    Buffer.add_string buf "    a:hover { text-decoration: underline; }\n";
    Buffer.add_string buf "  </style>\n";
    Buffer.add_string buf "</head>\n<body>\n";
    Buffer.add_string buf "  <h1>OCaml Package Documentation</h1>\n";
    Buffer.add_string buf "  <ul>\n";
    (* One link per package, href matching destination_path's blessed layout. *)
    List.iter
      (fun entry ->
        let pkg_name = OpamPackage.name_to_string entry.pkg in
        let pkg_version = OpamPackage.version_to_string entry.pkg in
        let href = Printf.sprintf "%s/%s/" pkg_name pkg_version in
        Buffer.add_string buf
          (Printf.sprintf "    <li><a href=\"%s\">%s.%s</a></li>\n" href pkg_name pkg_version))
      (List.sort (fun a b -> OpamPackage.compare a.pkg b.pkg) blessed);
    Buffer.add_string buf "  </ul>\n";
    Buffer.add_string buf "</body>\n</html>\n";
    Buffer.contents buf
  in
  if dry_run then begin
    Printf.printf "Would write index.html with %d packages\n%!" (List.length blessed);
    true
  end
  else begin
    let index_path = destination ^ "/index.html" in
    try
      let oc = open_out index_path in
      output_string oc index_content;
      close_out oc;
      Printf.printf "Generated index.html with %d packages\n%!" (List.length blessed);
      true
    with
    | exn ->
        Printf.eprintf "Failed to write index: %s\n%!" (Printexc.to_string exn);
        false
  end
+220
day10/bin/util.ml
(** Build an opam variable-lookup function describing the target platform.
    Returns [Some value] for the variables it knows, [None] otherwise so
    opam can fall back to its own resolution. *)
let std_env ?(ocaml_native = true) ?opam_version ~arch ~os ~os_distribution ~os_family ~os_version ?ocaml_version () = function
  | "arch" -> Some (OpamTypes.S arch)
  | "os" -> Some (OpamTypes.S os)
  | "os-distribution" -> Some (OpamTypes.S os_distribution)
  | "os-version" -> Some (OpamTypes.S os_version)
  | "os-family" -> Some (OpamTypes.S os_family)
  (* Default to the version of the linked opam library when not overridden. *)
  | "opam-version" -> Some (OpamVariable.S (Option.value ~default:OpamVersion.(to_string current) opam_version))
  (* There is no system compiler *)
  | "sys-ocaml-arch"
  | "sys-ocaml-cc"
  | "sys-ocaml-libc"
  | "sys-ocaml-system"
  | "sys-ocaml-version" ->
      Some (OpamTypes.S "")
  | "ocaml:native" -> Some (OpamTypes.B ocaml_native)
  (* Only answered when a compiler package was supplied. *)
  | "ocaml:version" -> Option.map (fun v -> OpamTypes.S (OpamPackage.version_to_string v)) ocaml_version
  | "enable-ocaml-beta-repository" -> None (* Fake variable? *)
  | _ ->
      None
(** Write layer metadata for [pkg] to the JSON file [name]: package id, exit
    status, dependency list, layer hashes and creation time.
    [installed_libs]/[installed_docs] are appended only when provided, keeping
    the schema unchanged for existing consumers. (Replaces the original's
    redundant [let fields = base_fields] and duplicated option-append code.) *)
let save_layer_info ?installed_libs ?installed_docs name pkg deps hashes rc =
  (* Optional string-list field: empty when absent, singleton when present. *)
  let opt_list key = function
    | None -> []
    | Some items -> [ (key, `List (List.map (fun s -> `String s) items)) ]
  in
  let fields =
    [
      ("package", `String (OpamPackage.to_string pkg));
      ("exit_status", `Int rc);
      ("deps", `List (List.map (fun p -> `String (OpamPackage.to_string p)) deps));
      ("hashes", `List (List.map (fun h -> `String h) hashes));
      ("created", `Float (Unix.time ()));
    ]
    @ opt_list "installed_libs" installed_libs
    @ opt_list "installed_docs" installed_docs
  in
  Yojson.Safe.to_file name (`Assoc fields)
(** Ensure a symlink exists from packages/{pkg_str}/{layer_name} -> ../../{layer_name}
    This enables tracking all builds/docs for a package.version. *)
let ensure_package_layer_symlink ~cache_dir ~os_key ~pkg_str ~layer_name =
  let pkg_dir = Path.(cache_dir / os_key / "packages" / pkg_str) in
  (* Make sure the per-package directory exists. *)
  if not (Sys.file_exists pkg_dir) then Os.mkdir ~parents:true pkg_dir;
  let link = Path.(pkg_dir / layer_name) in
  if not (Sys.file_exists link) then
    (* Another worker may create the link between the check and the call;
       EEXIST is therefore benign. *)
    try Unix.symlink Path.(".." / ".." / layer_name) link with
    | Unix.Unix_error (Unix.EEXIST, _, _) -> ()
(** Ensure the blessed-build or blessed-docs symlink exists for a package.
    These point to the layer that produced the blessed (canonical) docs. *)
let ensure_package_blessed_symlink ~cache_dir ~os_key ~pkg_str ~kind ~layer_name =
  let pkg_dir = Path.(cache_dir / os_key / "packages" / pkg_str) in
  (* Make sure the per-package directory exists. *)
  if not (Sys.file_exists pkg_dir) then Os.mkdir ~parents:true pkg_dir;
  let link_name =
    match kind with
    | `Build -> "blessed-build"
    | `Docs -> "blessed-docs"
  in
  let link = Path.(pkg_dir / link_name) in
  (* Blessing can change between runs, so replace unconditionally; tolerate
     races with other workers at both the unlink and the symlink step. *)
  (try Unix.unlink link with Unix.Unix_error (Unix.ENOENT, _, _) -> ());
  try Unix.symlink Path.(".." / ".." / layer_name) link with
  | Unix.Unix_error (Unix.EEXIST, _, _) -> ()
(** Write doc-layer metadata for [pkg] to the JSON file [name]: package id,
    build hash, dependency doc hashes and creation time. When [doc_result]
    is given it is appended as the "doc" field. *)
let save_doc_layer_info ?doc_result name pkg ~build_hash ~dep_doc_hashes =
  let base =
    [
      ("package", `String (OpamPackage.to_string pkg));
      ("build_hash", `String build_hash);
      ("dep_doc_hashes", `List (List.map (fun h -> `String h) dep_doc_hashes));
      ("created", `Float (Unix.time ()));
    ]
  in
  let fields =
    match doc_result with
    | None -> base
    | Some doc -> base @ [ ("doc", doc) ]
  in
  Yojson.Safe.to_file name (`Assoc fields)
(* Read the "exit_status" field from a layer.json file.
   Raises if the file or field is missing/malformed. *)
let load_layer_info_exit_status name =
  let json = Yojson.Safe.from_file name in
  Yojson.Safe.Util.(json |> member "exit_status" |> to_int)
(* Read the "package" field (a "name.version" string) from a layer.json file.
   Raises if the file or field is missing/malformed. *)
let load_layer_info_package_name name =
  let json = Yojson.Safe.from_file name in
  Yojson.Safe.Util.(json |> member "package" |> to_string)
(* Read the optional "installed_libs" list from a layer.json file; an absent
   field yields []. *)
let load_layer_info_installed_libs name =
  let json = Yojson.Safe.from_file name in
  let open Yojson.Safe.Util in
  match json |> member "installed_libs" with
  | `Null -> []
  | libs -> libs |> to_list |> List.map to_string
(* Read the optional "installed_docs" list from a layer.json file; an absent
   field yields []. *)
let load_layer_info_installed_docs name =
  let json = Yojson.Safe.from_file name in
  let open Yojson.Safe.Util in
  match json |> member "installed_docs" with
  | `Null -> []
  | docs -> docs |> to_list |> List.map to_string
(* True iff the layer.json has a "doc" record whose status is "failure".
   An absent "doc" field yields false; a "doc" record without a string
   "status" raises, as before. *)
let load_layer_info_doc_failed name =
  let open Yojson.Safe.Util in
  match Yojson.Safe.from_file name |> member "doc" with
  | `Null -> false
  | doc -> String.equal (doc |> member "status" |> to_string) "failure"
(* Read the optional "dep_doc_hashes" list from a layer.json file; an absent
   field yields []. *)
let load_layer_info_dep_doc_hashes name =
  let json = Yojson.Safe.from_file name in
  let open Yojson.Safe.Util in
  match json |> member "dep_doc_hashes" with
  | `Null -> []
  | hashes -> hashes |> to_list |> List.map to_string
(* Serialise a solution (map from package to its dependency set) as a JSON
   object: { "pkg.version": ["dep.version", ...], ... }. *)
let solution_to_json pkgs =
  `Assoc
    (OpamPackage.Map.fold
       (fun pkg deps lst -> (OpamPackage.to_string pkg, `List (OpamPackage.Set.to_list_map (fun p -> `String (OpamPackage.to_string p)) deps)) :: lst)
       pkgs [])
(* Inverse of [solution_to_json]: rebuild the package -> dependency-set map
   from its JSON object form. Raises on malformed JSON. *)
let solution_of_json json =
  let open Yojson.Safe.Util in
  json |> to_assoc
  |> List.fold_left
       (fun acc (s, l) ->
         let pkg = s |> OpamPackage.of_string in
         let deps = l |> to_list |> List.map (fun s -> s |> to_string |> OpamPackage.of_string) |> OpamPackage.Set.of_list in
         OpamPackage.Map.add pkg deps acc)
       OpamPackage.Map.empty
(* File and string (de)serialisation wrappers around solution_to_json /
   solution_of_json. *)
let solution_save name pkgs =
  Yojson.Safe.to_file name (solution_to_json pkgs)

let solution_load name =
  Yojson.Safe.from_file name |> solution_of_json

let solution_to_string pkgs =
  Yojson.Safe.to_string (solution_to_json pkgs)

let solution_of_string str =
  Yojson.Safe.from_string str |> solution_of_json
(** Scan a layer's fs directory for installed lib files.
    Returns a sorted list of relative paths within lib/ (e.g.
    ["ocaml/format.cmti"; "hmap/hmap.cmti"]). Only includes files with
    documentation-relevant extensions, plus META and dune-package files.
    Skips files and directories that can't be read (permission denied). *)
let scan_installed_lib_files ~layer_dir =
  let lib_dir = Path.(layer_dir / "fs" / "home" / "opam" / ".opam" / "default" / "lib") in
  (* Include .ml and .mli for odoc source documentation *)
  let extensions = [ ".cmi"; ".cmti"; ".cmt"; ".cma"; ".cmxa"; ".cmx"; ".ml"; ".mli" ] in
  let files = [ "META"; "dune-package" ] in
  let result = ref [] in
  (* Depth-first walk, accumulating "/"-joined relative paths. *)
  let rec scan_dir prefix dir =
    try
      if Sys.file_exists dir && Sys.is_directory dir then
        Sys.readdir dir |> Array.iter (fun name ->
            let full_path = Path.(dir / name) in
            let rel_path = if prefix = "" then name else prefix ^ "/" ^ name in
            try
              if Sys.is_directory full_path then
                scan_dir rel_path full_path
              else if List.exists (fun ext -> Filename.check_suffix name ext) extensions
                      || List.mem name files then
                result := rel_path :: !result
            with Sys_error _ -> () (* Skip files we can't access *))
    with Sys_error _ -> () (* Skip directories we can't read *)
  in
  scan_dir "" lib_dir;
  List.sort String.compare !result
(** Scan a layer's fs directory for installed doc files.
    Returns a sorted list of relative paths within doc/ (e.g.
    ["hmap.0.8.1/index.mld"]): .mld pages and odoc-config.sexp files only.
    Skips files and directories that can't be read (permission denied). *)
let scan_installed_doc_files ~layer_dir =
  let doc_dir = Path.(layer_dir / "fs" / "home" / "opam" / ".opam" / "default" / "doc") in
  let result = ref [] in
  (* Depth-first walk, accumulating "/"-joined relative paths. *)
  let rec scan_dir prefix dir =
    try
      if Sys.file_exists dir && Sys.is_directory dir then
        Sys.readdir dir |> Array.iter (fun name ->
            let full_path = Path.(dir / name) in
            let rel_path = if prefix = "" then name else prefix ^ "/" ^ name in
            try
              if Sys.is_directory full_path then
                scan_dir rel_path full_path
              else if Filename.check_suffix name ".mld"
                      || String.equal name "odoc-config.sexp" then
                result := rel_path :: !result
            with Sys_error _ -> () (* Skip files we can't access *))
    with Sys_error _ -> () (* Skip directories we can't read *)
  in
  scan_dir "" doc_dir;
  List.sort String.compare !result
(* Create a minimal empty opam repository at [path]/opam-repository (just a
   "repo" file declaring opam-version 2.0) and return the repository path. *)
let create_opam_repository path =
  let path = Path.(path / "opam-repository") in
  let () = Os.mkdir path in
  let () = Os.write_to_file Path.(path / "repo") {|opam-version: "2.0"|} in
  path
(* Find and parse [pkg]'s opam file in the first repository (searched in list
   order) containing packages/{name}/{name.version}/opam.
   Returns None if no repository provides the package. *)
let opam_file opam_repositories pkg =
  List.find_map
    (fun opam_repository ->
      let opam = Path.(opam_repository / "packages" / OpamPackage.name_to_string pkg / OpamPackage.to_string pkg / "opam") in
      if Sys.file_exists opam then Some (OpamFilename.raw opam |> OpamFile.make |> OpamFile.OPAM.read) else None)
    opam_repositories
···11+Hi Claude, please can you write a JavaScript application which will run in a web browser to display the results of CI builds.
22+33+A directory structure exists as shown below, which includes the GitHub SHA used in the run and a list of directories below, one per OS where it was tested. Not all the testing happens in parallel, so potentially some OS directories won't be available. Thus, don't assume there will always be Debian and Windows - there may be others or fewer. Below the OS directory is the version of the compiler used for testing, in this case, version 5.3.0.
44+55+```
66+.
77+├── commits.json
88+└── d2563c38bd32daaee47b6a6cade7e4c79270ef73
99+ ├── commit.json
1010+ └── debian-12
1111+ └── 5.3.0
1212+ ├── 0install.2.18.dot
1313+ ├── 0install.2.18.json
1414+ ├── alcotest.1.9.0.dot
1515+ ├── alcotest.1.9.0.json
1616+ ├── ansi.0.7.0.dot
1717+ ├── ansi.0.7.0.json
1818+ ├── bos.0.2.1.dot
1919+ ├── bos.0.2.1.json
2020+ ├── diffast-api.0.2.dot
2121+ └── diffast-api.0.2.json
2222+```
2323+2424+`commits.json` contains a description of each commit directory like this
2525+2626+```
2727+[
2828+ {
2929+ "sha": "d2563c38bd32daaee47b6a6cade7e4c79270ef73",
3030+ "date": "2025-07-24T03:50:08+00:00",
3131+ "message": "Merge pull request #28190 from rmonat/opam-publish-mopsa.1.2"
3232+ }
3333+]
3434+```
3535+3636+And each `commit.json` contains this:
3737+3838+```
3939+{
4040+ "debian-12": {
4141+ "5.3.0": [
4242+ {
4343+ "name": "0install.2.18",
4444+ "status": "success",
4545+ "layer": "a900bb178c94aec3a9b6be96dc150ddc"
4646+ },
4747+ {
4848+ "name": "alcotest.1.9.0",
4949+ "status": "success",
5050+ "layer": "9fcc87163d8aaf7985a90210c0ef37b1"
5151+ },
5252+ {
5353+ "name": "ansi.0.7.0",
5454+ "status": "success",
5555+ "layer": "336e1b50c3aeab32120df1621c3e1cee"
5656+ },
5757+ {
5858+ "name": "bos.0.2.1",
5959+ "status": "success",
6060+ "layer": "66d17ff760cb09b4217f4b392b96c792"
6161+ },
6262+ {
6363+ "name": "diffast-api.0.2",
6464+ "status": "success",
6565+ "layer": "85314b19757c6d8df1f602451071eea8"
6666+ }
6767+ ]
6868+ }
6969+}
7070+```
7171+7272+`name` contains the name of the package. The package name is in the format name dot version. So 0install.2.18.1 is package 0install version 2.18.1, i.e. the first occurrence of a dot indicates the start of the version number. The version number itself can contain many dots.
7373+7474+`status` is the overall status of the build, which will be one of: no_solution, dependency_failed, failure, success.
7575+7676+The `.dot` files are GraphViz files with the same name as the package. Where there is a solution a `.dot` file is provided but not all packages have a solution. Please add the digraph to the package page log. Let's render it in the browser with `Viz.js`. The output will need to be scaled to fit the display window as the graph may be quite large. The `.dot` files are minimal without any styling. Example below:
7777+7878+```
7979+digraph opam {
8080+ "0install-solver.2.18" -> {"dune.3.19.1" "ocaml.5.3.0"}
8181+ "dune.3.19.1" -> {"base-threads.base" "base-unix.base" "ocaml.5.3.0"}
8282+ "ocaml.5.3.0" -> {"ocaml-base-compiler.5.3.0" "ocaml-config.3"}
8383+ "ocaml-base-compiler.5.3.0" -> "ocaml-compiler.5.3.0";
8484+ "ocaml-config.3" -> "ocaml-base-compiler.5.3.0";
8585+}
8686+```
8787+8888+When we render these, we should apply some nice default styling like this:
8989+9090+```js
9191+const defaultStyles = { rankdir: "LR", nodeShape: "box", nodeFontColor: "#ffffff", nodeColor: "#ef7a08", nodeFillColor: "#ef7a08", nodeStyle: "filled", edgeColor: "#888888" };
9292+```
9393+9494+The `layer` is a hash of the layer which contains this build.
9797+9898+The hash is an index into the `/cache/` directory structure:
9797+9898+```
9999+/cache
100100+├── fc3a8cbcba91cf5d11de21dad7d138bc
101101+│ ├── build.log
102102+│ ├── layer.json
103103+├── adad97c884045a672843d4de9980f82d
104104+│ ├── build.log
105105+│ ├── layer.json
106106+```
107107+108108+`build.log` is the text output of the build.
109109+110110+`layer.json` is a JSON file containing the package name for this layer. `deps` is a list of direct dependency packages, `created` is the Unix timestamp of when the layer was created, and `exit_status` contains an integer value, which is the exit code of that step. 0 = success, anything else is failure.
111111+112112+`hashes` is a _complete_ list of all dependent layers, which are indexes into the `/cache` structure.
113113+114114+We should NOT load the `layer.json` from each dependent layer's cache entry. The initial `layer.json` contains ALL of the dependent layers.
115115+116116+The deps array contains the package names, and the hashes array contains the corresponding layer hashes, and they're in the same order. This means deps[0] corresponds to hashes[0], deps[1] corresponds to hashes[1], etc. Therefore, when displaying a sub layer, we can use the name from `deps[n]` rather than displaying `hashes[n]`.
117117+118118+```
119119+{{"package":"0install.2.18","exit_status":0,"deps":["ocurl.0.9.2","obus.1.2.5","lwt_react.1.2.0","lwt_ppx.5.9.1","lwt_log.1.1.2","lwt.5.9.1","xmlm.1.4.0","sha.1.15.4","react.1.2.2","ppxlib.0.35.0","ocplib-endian.1.2","menhir.20240715","dune-configurator.3.19.1","yojson.3.0.0","topkg.1.0.8","stdlib-shims.0.3.0","sexplib0.v0.17.0","ppx_derivers.1.2.1","ocaml-compiler-libs.v0.17.0","menhirSdk.20240715","menhirLib.20240715","menhirCST.20240715","csexp.1.5.2","cppo.1.8.0","base-bytes.base","0install-solver.2.18","ocamlfind.1.9.8","ocamlbuild.0.16.1","dune.3.19.1","ocaml.5.3.0","ocaml-config.3","ocaml-base-compiler.5.3.0","ocaml-compiler.5.3.0","conf-libcurl.2","base-unix.base","base-threads.base"],"hashes":["13ab638dcd860284863fd3a789868bac","936ac8e89f1781872af41c9780e3d421","2b83dd96d968dd921d6c53fb5d95cafc","d3c7cd833ee0e6b3fc08775ff9e82138","36ef6c2ba31e9c4ab5105a434d442ef4","08408fb34675182de9f456e0e39d0d47","19659791294010cc8d9cbd4b38f7e05b","2f9417ef8a4aedde0ba7cbc20f2635ce","edabe71adfdd082786455a37eefd5ade","2a47480c55f1c296bea55aea7d517132","d5551810d57c96eb0efc23e34b2a2d85","02ad9577d22698f5b0eeafc805566937","7ba2da9b3e919b0ec46c2885408b5a13","c111d0a4ce2c437af31c375d8310364c","2959afacd302f3852f1b203975b20442","e44f794eb6b75ee3f0873f8d44053640","2ba84bac5dd568cbe4e347eea0cb5c04","636f22552f4f47d53b0acf51c32dc002","95cacb84e95b32f1e09eec2ac8235876","aaad39ce113f3211ea5f6ac2949c263f","187beb9820c9e7e3983abd78b0d7c26c","907742fd086bef00728418b60ab1b1eb","da4cb5bcd209a33dfaef4609dd59fcf5","fd2ec950f6ec57ee9f3068a1be983eb2","6ce5d36394264c11fa9c9783f5c571eb","a867fb905efb9b7c28d2d93e911262bf","2502449a08ffae10ec7e76f69573bea0","ffe3a3b5cdff0845a333205c23109703","76659bcdbdbfff426a73ebb9b286a4d2","194f6e5c7f38885c9e2eea57d37ce7b0","351d01c1b782e729cf6a322649908629","71c33cbf6b90df69ede09d910b6d2a53","1980e378e8e2321c71032d0d6a8aa32d","9d23d01aec9788fe82d6b9f7a9aac11e","b102fe18cbdaa1a210b1fce3dbf1acbc","709149b2baf4d5f8851c4c1fe0cc5512"],"created":1754905764
.0}
120120+```
121121+122122+The site layout would be like this:-
123123+124124+```
125125+site/
126126+├── cache
127127+│ ├── 061bae6b4dbdb04ae77b8bb4f22d9a35
128128+│ │ └── layer.json
129129+│ └── 07958b7376fc56c89e5838b1dac502db
130130+│ └── layer.json
131131+├── ce03608b4ba656c052ef5e868cf34b9e86d02aac
132132+│ └── commit.json
133133+└── commits.json
134134+├── index.html # generate this
135135+├── script.js # generate this
136136+└── stylesheet.css # generate this
137137+```
138138+139139+`index.html` would load `script.js` and `stylesheet.css` to display the site although these can be embedded in a single `index.html` if preferred.
140140+141141+The landing page would display a list of commits from `commits.json`. Each of these could be clicked on to display a list of the packages within that commit.
142142+143143+The packages should be displayed as a table with rows for each package, and columns for each compiler and a colour-coded link to the build log. We must use symbols of `success`, `failure`, `no_solution` and `dependency_failed` to save on space. Each OS should be represented as a series of tabs along the top of the page. The page should cope with a narrow display on a mobile phone to avoid losing the right-hand columns of the table. Perhaps the ability to scroll would be sufficient. Since there will be ~5000 rows and 12 columns in the table, we should implement pagination.
144144+145145+For any given package, we should display the build log and the GraphViz visualisation as separate tabs. These graphs can be quite large so we might need to be able to zoom.
146146+147147+The build log should lazy-load all of the sub-layer build logs.
148148+149149+Can you make it so that the page URL reflects the current page, to provide a permalink to the current commit/os/package?
150150+
+33
day10/day10-web.opam
···11+# This file is generated by dune, edit dune-project instead
22+opam-version: "2.0"
33+synopsis: "Web dashboard for day10 documentation status"
44+description: "Status dashboard for package maintainers and operators"
55+maintainer: ["Maintainer Name <maintainer@example.com>"]
66+authors: ["Author Name <author@example.com>"]
77+license: "LICENSE"
88+homepage: "https://github.com/username/reponame"
99+doc: "https://url/to/documentation"
1010+bug-reports: "https://github.com/username/reponame/issues"
1111+depends: [
1212+ "ocaml" {>= "5.3.0"}
1313+ "dune" {>= "3.17"}
1414+ "dream"
1515+ "day10"
1616+ "cmdliner"
1717+ "odoc" {with-doc}
1818+]
1919+build: [
2020+ ["dune" "subst"] {dev}
2121+ [
2222+ "dune"
2323+ "build"
2424+ "-p"
2525+ name
2626+ "-j"
2727+ jobs
2828+ "@install"
2929+ "@runtest" {with-test}
3030+ "@doc" {with-doc}
3131+ ]
3232+]
3333+dev-repo: "git+https://github.com/username/reponame.git"
···11+# day10 Administrator's Guide
22+33+This guide covers how to set up and run day10 as a documentation generation system for OCaml packages, intended as a replacement for ocaml-docs-ci.
44+55+## Overview
66+77+day10 builds OCaml packages and generates documentation using odoc. Key features:
88+99+- **Fresh solving**: Always solves against current opam-repository (no stale cross-references)
1010+- **Graceful degradation**: Failed rebuilds preserve existing docs
1111+- **Layer caching**: Fast rebuilds via overlay filesystem caching
1212+- **Parallel processing**: Fork-based parallelism for batch runs
1313+1414+## Prerequisites
1515+1616+### System Requirements
1717+1818+- Linux (Debian/Ubuntu recommended)
1919+- Root access (for runc containers)
2020+- At least 50GB disk space for cache
2121+- 8GB+ RAM recommended
2222+2323+### Dependencies
2424+2525+```bash
2626+# System packages
2727+sudo apt-get update
2828+sudo apt-get install -y \
2929+ build-essential \
3030+ git \
3131+ curl \
3232+ runc \
3333+ opam
3434+3535+# Initialize opam
3636+opam init -y
3737+eval $(opam env)
3838+3939+# Install OCaml and day10 dependencies
4040+opam switch create 5.2.0
4141+opam install -y dune opam-0install yojson cmdliner dockerfile ppx_deriving_yojson
4242+```
4343+4444+### Clone opam-repository
4545+4646+```bash
4747+git clone https://github.com/ocaml/opam-repository /data/opam-repository
4848+```
4949+5050+## Installation
5151+5252+### Build day10
5353+5454+```bash
5555+git clone https://github.com/mtelvers/ohc day10
5656+cd day10
5757+opam install . --deps-only
5858+dune build
5959+dune install
6060+```
6161+6262+Verify installation:
6363+```bash
6464+day10 --version
6565+day10 --help
6666+```
6767+6868+## Directory Structure
6969+7070+Recommended production layout:
7171+7272+```
7373+/data/
7474+├── opam-repository/ # Clone of ocaml/opam-repository
7575+├── cache/ # Layer cache (can grow large)
7676+│ ├── debian-12-x86_64/
7777+│ │ ├── base/ # Base image layer
7878+│ │ ├── solutions/ # Cached solver results
7979+│ │ ├── build-*/ # Build layers
8080+│ │ └── doc-*/ # Doc layers
8181+│ └── logs/
8282+│ ├── runs/ # Per-run logs and summaries
8383+│ └── latest # Symlink to most recent run
8484+├── html/ # Generated documentation
8585+│ ├── p/ # Blessed package docs
8686+│ │ └── {pkg}/{ver}/
8787+│ └── u/ # Universe docs (dependencies)
8888+│ └── {hash}/{pkg}/{ver}/
8989+└── packages.json # Package list for batch runs
9090+```
9191+9292+## Basic Usage
9393+9494+### Single Package
9595+9696+Build and generate docs for one package:
9797+9898+```bash
9999+day10 health-check \
100100+ --cache-dir /data/cache \
101101+ --opam-repository /data/opam-repository \
102102+ --html-output /data/html \
103103+ base.0.16.0
104104+```
105105+106106+### Multiple Packages
107107+108108+Create a JSON file listing packages:
109109+110110+```bash
111111+# packages.json
112112+{"packages": ["base.0.16.0", "core.0.16.0", "async.0.16.0"]}
113113+```
114114+115115+Run batch mode:
116116+117117+```bash
118118+day10 batch \
119119+ --cache-dir /data/cache \
120120+ --opam-repository /data/opam-repository \
121121+ --html-output /data/html \
122122+ --fork 8 \
123123+ @packages.json
124124+```
125125+126126+### All Packages
127127+128128+Generate a list of all packages in opam-repository:
129129+130130+```bash
131131+day10 list \
132132+ --opam-repository /data/opam-repository \
133133+ --all-versions \
134134+ --json /data/all-packages.json
135135+```
136136+137137+Run on everything (this takes hours/days):
138138+139139+```bash
140140+day10 batch \
141141+ --cache-dir /data/cache \
142142+ --opam-repository /data/opam-repository \
143143+ --html-output /data/html \
144144+ --fork 16 \
145145+ @/data/all-packages.json
146146+```
147147+148148+## Command Reference
149149+150150+### day10 batch
151151+152152+Main command for production use.
153153+154154+```
155155+day10 batch [OPTIONS] PACKAGE
156156+157157+PACKAGE: Single package (e.g., "base.0.16.0") or @filename for JSON list
158158+159159+Required:
160160+ --cache-dir DIR Layer cache directory
161161+ --opam-repository DIR Path to opam-repository (can specify multiple)
162162+163163+Recommended:
164164+ --html-output DIR Where to write documentation
165165+ --fork N Parallel workers (default: 1)
166166+167167+Optional:
168168+ --ocaml-version VER Pin OCaml version (default: solver picks)
169169+ --dry-run Check what would be built without building
170170+ --log Print build logs to stdout
171171+ --json DIR Write per-package JSON results
172172+ --md DIR Write per-package markdown results
173173+```
174174+175175+### day10 health-check
176176+177177+Run on single package or small set (simpler than batch for testing):
178178+179179+```
180180+day10 health-check [OPTIONS] PACKAGE
181181+```
182182+183183+### day10 list
184184+185185+List packages in opam-repository:
186186+187187+```
188188+day10 list --opam-repository DIR [--all-versions] [--json FILE]
189189+```
190190+191191+## Production Setup
192192+193193+### Systemd Service
194194+195195+Create `/etc/systemd/system/day10.service`:
196196+197197+```ini
198198+[Unit]
199199+Description=day10 documentation generator
200200+After=network.target
201201+202202+[Service]
203203+Type=oneshot
204204+User=root
205205+WorkingDirectory=/data
206206+ExecStart=/usr/local/bin/day10 batch \
207207+ --cache-dir /data/cache \
208208+ --opam-repository /data/opam-repository \
209209+ --html-output /data/html \
210210+ --fork 8 \
211211+ @/data/packages.json
212212+StandardOutput=journal
213213+StandardError=journal
214214+215215+[Install]
216216+WantedBy=multi-user.target
217217+```
218218+219219+### Cron Job
220220+221221+For periodic rebuilds (e.g., daily at 2 AM):
222222+223223+```bash
224224+# /etc/cron.d/day10
225225+0 2 * * * root flock -n /var/run/day10.lock /usr/local/bin/day10 batch --cache-dir /data/cache --opam-repository /data/opam-repository --html-output /data/html --fork 8 @/data/packages.json >> /var/log/day10-cron.log 2>&1
226226+```
227227+228228+### Webhook Trigger
229229+230230+To rebuild on opam-repository updates, set up a webhook endpoint that:
231231+232232+1. Pulls latest opam-repository
233233+2. Triggers day10 batch run
234234+235235+Example script `/usr/local/bin/day10-trigger.sh`:
236236+237237+```bash
238238+#!/bin/bash
239239+set -e
240240+241241+cd /data/opam-repository
242242+git fetch origin
243243+git reset --hard origin/master
244244+245245+flock -n /var/run/day10.lock \
246246+ day10 batch \
247247+ --cache-dir /data/cache \
248248+ --opam-repository /data/opam-repository \
249249+ --html-output /data/html \
250250+ --fork 8 \
251251+ @/data/packages.json
252252+```
253253+254254+### Serving Documentation
255255+256256+Use nginx to serve the HTML output:
257257+258258+```nginx
259259+server {
260260+ listen 80;
261261+ server_name docs.example.com;
262262+ root /data/html;
263263+264264+ location / {
265265+ autoindex on;
266266+ try_files $uri $uri/ =404;
267267+ }
268268+}
269269+```
270270+271271+### Status Dashboard (day10-web)
272272+273273+day10-web provides a web interface for monitoring package build status:
274274+275275+```bash
276276+# Install day10-web
277277+opam install day10-web
278278+279279+# Run the dashboard
280280+day10-web --cache-dir /data/cache --html-dir /data/html --port 8080
281281+```
282282+283283+#### Systemd Service for day10-web
284284+285285+Create `/etc/systemd/system/day10-web.service`:
286286+287287+```ini
288288+[Unit]
289289+Description=day10 status dashboard
290290+After=network.target
291291+292292+[Service]
293293+Type=simple
294294+User=www-data
295295+ExecStart=/usr/local/bin/day10-web \
296296+ --cache-dir /data/cache \
297297+ --html-dir /data/html \
298298+ --host 0.0.0.0 \
299299+ --port 8080
300300+Restart=always
301301+302302+[Install]
303303+WantedBy=multi-user.target
304304+```
305305+306306+Enable and start:
307307+308308+```bash
309309+sudo systemctl enable day10-web
310310+sudo systemctl start day10-web
311311+```
312312+313313+#### Combined nginx Configuration
314314+315315+Serve both the dashboard and documentation:
316316+317317+```nginx
318318+server {
319319+ listen 80;
320320+ server_name docs.example.com;
321321+322322+ # Status dashboard
323323+ location / {
324324+ proxy_pass http://127.0.0.1:8080;
325325+ proxy_set_header Host $host;
326326+ proxy_set_header X-Real-IP $remote_addr;
327327+ }
328328+329329+ # Generated documentation
330330+ location /docs/ {
331331+ alias /data/html/;
332332+ autoindex on;
333333+ try_files $uri $uri/ =404;
334334+ }
335335+}
336336+```
337337+338338+#### Dashboard Features
339339+340340+- **Dashboard** (`/`): Overview with build/doc success rates, latest run summary
341341+- **Packages** (`/packages`): Searchable list of all packages with docs
342342+- **Package Detail** (`/packages/{name}/{version}`): Version list and doc links
343343+- **Runs** (`/runs`): History of all batch runs
344344+- **Run Detail** (`/runs/{id}`): Statistics, failures, and log links
345345+- **Logs** (`/runs/{id}/build/{pkg}`, `/runs/{id}/docs/{pkg}`): View build and doc logs
346346+347347+## Monitoring
348348+349349+### Run Logs
350350+351351+Each batch run creates a timestamped directory:
352352+353353+```
354354+/data/cache/logs/runs/2026-02-04-120000/
355355+├── summary.json # Run statistics
356356+├── build/ # Build logs by package
357357+│ ├── base.0.16.0.log
358358+│ └── core.0.16.0.log
359359+└── docs/ # Doc generation logs
360360+ ├── base.0.16.0.log
361361+ └── core.0.16.0.log
362362+```
363363+364364+The `latest` symlink always points to the most recent run:
365365+366366+```bash
367367+cat /data/cache/logs/latest/summary.json
368368+```
369369+370370+### summary.json Format
371371+372372+```json
373373+{
374374+ "run_id": "2026-02-04-120000",
375375+ "start_time": "2026-02-04T12:00:00",
376376+ "end_time": "2026-02-04T14:30:00",
377377+ "duration_seconds": 9000,
378378+ "targets_requested": 100,
379379+ "solutions_found": 95,
380380+ "build_success": 90,
381381+ "build_failed": 5,
382382+ "doc_success": 85,
383383+ "doc_failed": 3,
384384+ "doc_skipped": 2,
385385+ "failures": [
386386+ {"package": "broken-pkg.1.0.0", "error": "build exit code 2"},
387387+ {"package": "bad-docs.2.0.0", "error": "doc: odoc error"}
388388+ ]
389389+}
390390+```
391391+392392+### Checking Status
393393+394394+```bash
395395+# Quick status
396396+jq '.build_success, .build_failed, .doc_success, .doc_failed' \
397397+ /data/cache/logs/latest/summary.json
398398+399399+# List failures
400400+jq -r '.failures[] | "\(.package): \(.error)"' \
401401+ /data/cache/logs/latest/summary.json
402402+403403+# Duration
404404+jq '.duration_seconds / 60 | floor | "\(.)m"' \
405405+ /data/cache/logs/latest/summary.json
406406+```
407407+408408+### Disk Usage
409409+410410+Monitor cache growth:
411411+412412+```bash
413413+du -sh /data/cache/debian-12-x86_64/
414414+du -sh /data/html/
415415+```
416416+417417+## Maintenance
418418+419419+### Cache Management
420420+421421+The cache grows over time. After each batch run, garbage collection automatically:
422422+423423+1. **Layer GC**: Deletes build/doc layers not referenced by current solutions
424424+2. **Universe GC**: Deletes universe directories not referenced by any blessed package
425425+426426+GC runs automatically at the end of each batch. Special layers are preserved:
427427+- `base` - Base OS image
428428+- `solutions` - Solver cache
429429+- `doc-driver-*` - Shared odoc driver
430430+- `doc-odoc-*` - Per-OCaml-version odoc
431431+432432+### Manual Cache Cleanup
433433+434434+To force a complete rebuild:
435435+436436+```bash
437437+# Remove all layers (keeps base)
438438+rm -rf /data/cache/debian-12-x86_64/build-*
439439+rm -rf /data/cache/debian-12-x86_64/doc-*
440440+441441+# Remove solution cache (forces re-solving)
442442+rm -rf /data/cache/debian-12-x86_64/solutions/
443443+```
444444+445445+### Updating opam-repository
446446+447447+```bash
448448+cd /data/opam-repository
449449+git fetch origin
450450+git reset --hard origin/master
451451+```
452452+453453+Solutions are cached by opam-repository commit hash, so updating automatically invalidates old solutions.
454454+455455+### Epoch Transitions
456456+457457+For major changes (new odoc version, URL scheme change), you may want a clean rebuild:
458458+459459+1. Create new html directory: `/data/html-new/`
460460+2. Run full batch with `--html-output /data/html-new/`
461461+3. Once complete, atomically swap: `mv /data/html /data/html-old && mv /data/html-new /data/html`
462462+4. Remove old: `rm -rf /data/html-old`
463463+464464+## Troubleshooting
465465+466466+### Build Failures
467467+468468+Check the build log:
469469+470470+```bash
471471+cat /data/cache/logs/latest/build/failing-pkg.1.0.0.log
472472+```
473473+474474+Or check the layer directly:
475475+476476+```bash
477477+cat /data/cache/debian-12-x86_64/build-*/build.log
478478+```
479479+480480+### Doc Generation Failures
481481+482482+```bash
483483+cat /data/cache/logs/latest/docs/failing-pkg.1.0.0.log
484484+```
485485+486486+Common issues:
487487+- Missing `.cmti` files (package doesn't install them)
488488+- odoc bugs with certain code patterns
489489+- Memory exhaustion on large packages
490490+491491+### Stale .new/.old Directories
492492+493493+If a run was interrupted, stale staging directories may exist:
494494+495495+```bash
496496+find /data/html -name "*.new" -o -name "*.old"
497497+```
498498+499499+These are automatically cleaned up at the start of each batch run.
500500+501501+### Permission Issues
502502+503503+day10 uses runc containers which require root. If you see permission errors:
504504+505505+```bash
506506+# Check runc works
507507+sudo runc --version
508508+509509+# Ensure cache directory is accessible
510510+sudo chown -R root:root /data/cache
511511+```
512512+513513+### Memory Issues
514514+515515+For large package sets, you may need to limit parallelism:
516516+517517+```bash
518518+# Reduce fork count
519519+day10 batch --fork 4 ...
520520+```
521521+522522+Or increase system memory/swap.
523523+524524+## Architecture Notes
525525+526526+### How Layers Work
527527+528528+Each package build creates a layer using overlay filesystem:
529529+530530+```
531531+build-{hash}/
532532+├── fs/ # Filesystem overlay (installed files)
533533+├── build.log # Build output
534534+└── layer.json # Metadata (package, deps, status)
535535+```
536536+537537+The hash is computed from the package and its dependencies, so unchanged packages reuse existing layers.
538538+539539+### Blessing
540540+541541+In batch mode, day10 computes "blessings" - which package version is canonical for each package name. Blessed packages go to `/html/p/`, non-blessed go to `/html/u/{universe}/`.
542542+543543+### Graceful Degradation
544544+545545+When doc generation fails:
546546+1. New docs are written to a staging directory
547547+2. On success: atomically swap staging → final
548548+3. On failure: staging is discarded, old docs remain
549549+550550+This ensures the live site never shows broken docs.
551551+552552+## Getting Help
553553+554554+- Check logs in `/data/cache/logs/latest/`
555555+- Review `summary.json` for failure details
556556+- File issues at: https://github.com/mtelvers/ohc/issues
+697
day10/docs/GAP_ANALYSIS.md
···11+# Gap Analysis: Replacing ocaml-docs-ci with day10
22+33+**Date:** 2026-02-03
44+**Purpose:** Comprehensive comparison of `day10` (OHC) and `ocaml-docs-ci` to identify features, gaps, and requirements for replacing ocaml-docs-ci as the documentation CI system for docs.ocaml.org.
55+66+---
77+88+## Table of Contents
99+1010+1. [Executive Summary](#executive-summary)
1111+2. [Architecture Overview](#architecture-overview)
1212+3. [Feature Comparison Matrix](#feature-comparison-matrix)
1313+4. [Detailed Gap Analysis](#detailed-gap-analysis)
1414+5. [Ecosystem Integration](#ecosystem-integration)
1515+6. [Implementation Roadmap](#implementation-roadmap)
1616+7. [Risk Assessment](#risk-assessment)
1717+1818+---
1919+2020+## Executive Summary
2121+2222+### Current State
2323+2424+| Aspect | day10 | ocaml-docs-ci |
2525+|--------|-------|---------------|
2626+| **Primary Purpose** | Health checking OPAM packages (build + docs) | CI pipeline for docs.ocaml.org |
2727+| **Architecture** | Standalone CLI with fork-based parallelism | OCurrent-based reactive pipeline |
2828+| **Container Runtime** | runc/OCI with overlay2 layers | OCluster (single machine in practice) |
2929+| **Doc Generation** | Uses odoc_driver_voodoo | Uses voodoo-do + odoc_driver_voodoo |
3030+| **State Management** | File-based (layer.json) | SQLite database + OCurrent cache |
3131+| **Scalability** | Single machine, forked workers | Single machine (OCluster theoretical) |
3232+3333+### Key Findings
3434+3535+**Important Context:** While ocaml-docs-ci has OCluster infrastructure for theoretically distributed execution, **in practice it runs on a single machine**. This significantly reduces the gap between the two systems.
3636+3737+**day10 Strengths:**
3838+- Simpler, more portable architecture (Linux/Windows/FreeBSD)
3939+- Efficient overlay2-based incremental building
4040+- Direct container control without orchestration overhead
4141+- Standalone operation without external services
4242+- Comparable parallelism model (fork-based vs single-machine OCluster)
4343+4444+**ocaml-docs-ci Strengths:**
4545+- Production-proven for docs.ocaml.org
4646+- Reactive pipeline with automatic rebuilding
4747+- Rich monitoring and status APIs
4848+- Epoch-based atomic updates
4949+- Web UI for status visibility
5050+5151+### Migration Complexity: **MODERATE**
5252+5353+Since both systems effectively run on single machines, the gap is smaller than it might appear from the architecture diagrams. The core documentation generation is identical (both use voodoo/odoc_driver_voodoo). The main gaps are in orchestration (reactive vs manual), state management, and deployment infrastructure (epochs).
5454+5555+---
5656+5757+## Architecture Overview
5858+5959+### day10 Architecture
6060+6161+```
6262+┌─────────────────────────────────────────────────────────────┐
6363+│ day10 CLI │
6464+├─────────────────────────────────────────────────────────────┤
6565+│ Commands: health-check | ci | batch | list | sync-docs │
6666+└─────────────────────┬───────────────────────────────────────┘
6767+ │
6868+ ┌────────────┼────────────┐
6969+ ▼ ▼ ▼
7070+┌─────────────┐ ┌──────────┐ ┌──────────────┐
7171+│ Solver │ │ Builder │ │ Doc Gen │
7272+│ opam-0install│ │ runc │ │odoc_driver │
7373+└─────────────┘ └──────────┘ └──────────────┘
7474+ │ │ │
7575+ └────────────┼────────────┘
7676+ ▼
7777+ ┌────────────────────────┐
7878+ │ Overlay2 Layers │
7979+ │ (cache_dir/) │
8080+ │ ├── base/fs │
8181+ │ ├── build-{hash}/ │
8282+ │ ├── doc-{hash}/ │
8383+ │ └── layer.json │
8484+ └────────────────────────┘
8585+```
8686+8787+**Key Characteristics:**
8888+- Single-machine execution with fork-based parallelism
8989+- Layer-based caching with overlay2 filesystem
9090+- Deterministic hash-based layer identification
9191+- Direct runc container execution
9292+9393+### ocaml-docs-ci Architecture
9494+9595+```
9696+┌─────────────────────────────────────────────────────────────┐
9797+│ ocaml-docs-ci │
9898+│ (OCurrent Pipeline) │
9999+├─────────────────────────────────────────────────────────────┤
100100+│ Stages: Track → Solve → Prep → Bless → Compile → Publish │
101101+└─────────────────────┬───────────────────────────────────────┘
102102+ │
103103+ ┌─────────────────┼─────────────────┐
104104+ ▼ ▼ ▼
105105+┌─────────┐ ┌───────────┐ ┌──────────────┐
106106+│ Solver │ │ OCluster │ │ Storage │
107107+│ Service │ │ (Workers) │ │ Server │
108108+│(Cap'n P)│ │ │ │ (SSH/rsync) │
109109+└─────────┘ └───────────┘ └──────────────┘
110110+ │
111111+ ┌──────────┴──────────┐
112112+ ▼ ▼
113113+┌─────────────────┐ ┌─────────────────┐
114114+│ prep/ │ │ html/ │
115115+│ (voodoo-prep) │ │ (HTML output) │
116116+└─────────────────┘ └─────────────────┘
117117+ │
118118+ ▼
119119+ ┌─────────────────┐
120120+ │ docs.ocaml.org │
121121+ │ (epoch symlinks)│
122122+ └─────────────────┘
123123+```
124124+125125+**Key Characteristics:**
126126+- OCluster infrastructure (but single-machine in practice)
127127+- Reactive pipeline (rebuilds on changes)
128128+- SQLite for state tracking
129129+- Cap'n Proto for service communication
130130+- Epoch-based atomic deployments
131131+132132+**Note:** Despite the distributed architecture in the diagram, ocaml-docs-ci currently runs all workers on a single machine, making it comparable to day10's fork-based approach.
133133+134134+---
135135+136136+## Feature Comparison Matrix
137137+138138+### Core Features
139139+140140+| Feature | day10 | ocaml-docs-ci | Gap Level |
141141+|---------|-------|---------------|-----------|
142142+| **Package Building** | ✅ Full | ✅ Full | None |
143143+| **Documentation Generation** | ✅ odoc_driver_voodoo | ✅ voodoo + odoc_driver | None |
144144+| **Dependency Solving** | ✅ opam-0install | ✅ opam-0install (service) | Minor |
145145+| **Multiple OCaml Versions** | ✅ Configurable | ✅ Multiple tracked | None |
146146+| **Blessing System** | ✅ Implemented | ✅ Implemented | None |
147147+| **Incremental Building** | ✅ overlay2 layers | ✅ prep caching | Different approach |
148148+149149+### Orchestration & Scheduling
150150+151151+| Feature | day10 | ocaml-docs-ci | Gap Level |
152152+|---------|-------|---------------|-----------|
153153+| **Parallelism** | ✅ Fork-based (--fork N) | ✅ OCluster (single machine) | Similar |
154154+| **Distributed Execution** | ❌ Single machine | ⚠️ Single machine (theory: multi) | None (in practice) |
155155+| **Reactive Rebuilding** | ❌ Manual trigger | ✅ OCurrent reactive | **MAJOR GAP** |
156156+| **Job Queuing** | ❌ None | ✅ OCluster scheduler | Minor |
157157+| **Automatic Change Detection** | ❌ Manual | ✅ Git-based tracking | **MAJOR GAP** |
158158+159159+### State Management
160160+161161+| Feature | day10 | ocaml-docs-ci | Gap Level |
162162+|---------|-------|---------------|-----------|
163163+| **Build State Tracking** | ✅ layer.json files | ✅ SQLite database | Different |
164164+| **Solution Caching** | ✅ Per-commit hash | ✅ Per-commit hash | Similar |
165165+| **Pipeline History** | ❌ None | ✅ Full history in DB | **MAJOR GAP** |
166166+| **Package Status Tracking** | ⚠️ Basic (JSON) | ✅ Full (DB + API) | **Moderate** |
167167+| **Epoch Management** | ❌ None | ✅ Full (atomic updates) | **MAJOR GAP** |
168168+169169+### External Integrations
170170+171171+| Feature | day10 | ocaml-docs-ci | Gap Level |
172172+|---------|-------|---------------|-----------|
173173+| **opam-repository Tracking** | ✅ Local path | ✅ Git clone + tracking | Minor |
174174+| **Storage Backend** | ✅ Local filesystem | ✅ SSH/rsync server | **Moderate** |
175175+| **Web UI** | ❌ None | ✅ OCurrent web | **MAJOR GAP** |
176176+| **API for Querying** | ❌ None | ✅ Cap'n Proto API | **MAJOR GAP** |
177177+| **GitHub Integration** | ❌ None | ✅ Via opam-repo | Minor |
178178+179179+### Output & Publishing
180180+181181+| Feature | day10 | ocaml-docs-ci | Gap Level |
182182+|---------|-------|---------------|-----------|
183183+| **HTML Generation** | ✅ Full | ✅ Full | None |
184184+| **Search Index** | ✅ Via odoc_driver | ✅ Via voodoo-gen | None |
185185+| **Atomic Deployment** | ❌ None | ✅ Epoch symlinks | **MAJOR GAP** |
186186+| **Valid Package List** | ❌ None | ✅ Published list | **Moderate** |
187187+| **Sync to Remote** | ✅ sync-docs command | ✅ rsync integration | Similar |
188188+189189+### Platform Support
190190+191191+| Feature | day10 | ocaml-docs-ci | Gap Level |
192192+|---------|-------|---------------|-----------|
193193+| **Linux x86_64** | ✅ | ✅ | None |
194194+| **Linux arm64** | ✅ | ✅ | None |
195195+| **Windows** | ✅ containerd | ❌ Linux only | day10 ahead |
196196+| **FreeBSD** | ✅ | ❌ | day10 ahead |
197197+| **Multi-arch builds** | ✅ | ✅ | None |
198198+199199+---
200200+201201+## Detailed Gap Analysis
202202+203203+### 1. CRITICAL GAPS (Must Have)
204204+205205+#### 1.1 Reactive Pipeline / Change Detection
206206+207207+**ocaml-docs-ci has:**
208208+- OCurrent-based reactive pipeline that automatically rebuilds when inputs change
209209+- Git-based tracking of opam-repository commits
210210+- Automatic detection of new/updated packages
211211+- Dependency-aware rebuilding (if A changes, rebuild dependents)
212212+213213+**day10 lacks:**
214214+- No automatic change detection
215215+- Manual triggering required
216216+- No concept of "pipeline" - just single-shot execution
217217+218218+**Implementation Options:**
219219+1. **Add OCurrent integration** - Wrap day10 in OCurrent pipeline
220220+2. **Implement custom watcher** - Poll opam-repo, track changes, trigger builds
221221+3. **External orchestration** - Use GitHub Actions/Jenkins to trigger day10
222222+223223+**Recommended:** Option 1 or 3. Adding full OCurrent would be significant work but provides the richest feature set.
224224+225225+---
226226+227227+#### 1.2 ~~Distributed Execution~~ (Not a Real Gap)
228228+229229+**Reality check:** While ocaml-docs-ci has OCluster infrastructure, **it runs on a single machine in practice**. This means:
230230+231231+- Both systems effectively use single-machine parallelism
232232+- day10's fork-based approach (`--fork N`) is comparable to ocaml-docs-ci's actual operation
233233+- OCluster adds overhead without providing real distribution benefits in current deployment
234234+235235+**Conclusion:** This is **not a gap** for the migration. day10's existing parallelism model is sufficient.
236236+237237+**Future consideration:** If true distribution becomes needed, day10 could add OCluster support, but this is not required for feature parity with the current production system.
238238+239239+---
240240+241241+#### 1.3 Epoch-Based Deployment
242242+243243+**ocaml-docs-ci has:**
244244+- Epoch system for versioned artifact collections
245245+- Atomic promotion via symlinks (html-current → html-live)
246246+- Garbage collection of old epochs
247247+- Safe rollback capability
248248+249249+**day10 lacks:**
250250+- No epoch concept
251251+- Direct file output
252252+- No atomic update mechanism
253253+254254+**Implementation Required:**
255255+- Add epoch directory management
256256+- Implement symlink-based promotion
257257+- Add epoch cleanup/GC functionality
258258+- Support for `html-current` → `html-live` workflow
259259+260260+---
261261+262262+#### 1.4 Web UI & Monitoring
263263+264264+**ocaml-docs-ci has:**
265265+- OCurrent-based web dashboard
266266+- Real-time pipeline status
267267+- Job logs viewable in browser
268268+- Package-level status tracking
269269+270270+**day10 lacks:**
271271+- No web interface
272272+- CLI-only interaction
273273+- No real-time monitoring
274274+275275+**Implementation Options:**
276276+1. **Use OCurrent web** - If integrating with OCurrent
277277+2. **Build custom web UI** - Separate web service reading day10 state
278278+3. **Static status pages** - Generate HTML status reports
279279+280280+**Recommended:** Option 1 if using OCurrent, otherwise Option 3 for minimal viable monitoring.
281281+282282+---
283283+284284+#### 1.5 Remote API
285285+286286+**ocaml-docs-ci has:**
287287+- Cap'n Proto RPC API for querying pipeline state
288288+- Package status queries
289289+- Pipeline health checks
290290+- CLI client (ocaml-docs-ci-client)
291291+292292+**day10 lacks:**
293293+- No remote API
294294+- No programmatic access to state
295295+- Cannot query status without reading files
296296+297297+**Implementation Options:**
298298+1. **Add Cap'n Proto service** - Match ocaml-docs-ci interface
299299+2. **REST API** - Simpler but different from existing ecosystem
300300+3. **GraphQL** - Modern but overkill for this use case
301301+302302+**Recommended:** Option 1 for compatibility with existing tooling.
303303+304304+---
305305+306306+### 2. MODERATE GAPS (Should Have)
307307+308308+#### 2.1 Database-Backed State
309309+310310+**ocaml-docs-ci:** SQLite database tracking pipeline runs, package statuses, build history
311311+312312+**day10:** File-based state (layer.json, JSON outputs)
313313+314314+**Gap Impact:** Harder to query historical data, no pipeline-level tracking
315315+316316+**Implementation:** Add SQLite or similar for tracking builds over time
317317+318318+---
319319+320320+#### 2.2 Solver Service Architecture
321321+322322+**ocaml-docs-ci:** External solver service via Cap'n Proto, can run multiple solvers in parallel
323323+324324+**day10:** In-process solving, one solve at a time per fork
325325+326326+**Gap Impact:** Potentially slower for large solve operations
327327+328328+**Implementation:** Could extract solver to service, but current approach works
329329+330330+---
331331+332332+#### 2.3 Valid Package List Publishing
333333+334334+**ocaml-docs-ci:** Publishes list of successfully-built packages for ocaml.org filtering
335335+336336+**day10:** No concept of valid package list
337337+338338+**Implementation:** Add post-build step to generate/publish valid package manifest
339339+340340+---
341341+342342+### 3. MINOR GAPS (Nice to Have)
343343+344344+#### 3.1 Storage Server Integration
345345+346346+**ocaml-docs-ci:** SSH/rsync to remote storage server, automatic sync
347347+348348+**day10:** Local filesystem, manual sync-docs command
349349+350350+**Gap Impact:** Requires additional orchestration for remote deployment
351351+352352+---
353353+354354+#### 3.2 Multiple opam-repository Sources
355355+356356+**ocaml-docs-ci:** Tracks specific git repository with commit history
357357+358358+**day10:** Supports multiple local paths, no git tracking
359359+360360+**Gap Impact:** Cannot automatically detect new packages
361361+362362+---
363363+364364+### 4. DAY10 ADVANTAGES
365365+366366+Features day10 has that ocaml-docs-ci lacks:
367367+368368+| Feature | Benefit |
369369+|---------|---------|
370370+| **Windows Support** | Can build Windows packages |
371371+| **FreeBSD Support** | Can build BSD packages |
372372+| **Simpler Deployment** | No cluster infrastructure needed |
373373+| **Layer-based Caching** | More efficient disk usage with overlay2 |
374374+| **Standalone Operation** | Works without external services (OCluster, solver-service) |
375375+| **Direct Container Control** | Lower latency, no scheduler overhead |
376376+| **Equivalent Parallelism** | Fork-based model matches ocaml-docs-ci's actual single-machine operation |
377377+| **Simpler Debugging** | No distributed system complexity to troubleshoot |
378378+379379+---
380380+381381+## Ecosystem Integration
382382+383383+### Voodoo Integration
384384+385385+Both day10 and ocaml-docs-ci use the same documentation toolchain:
386386+387387+```
388388+ ┌─────────────────┐
389389+ │ voodoo-prep │
390390+ │ (artifact prep) │
391391+ └────────┬────────┘
392392+ │
393393+ ┌───────────────┴───────────────┐
394394+ ▼ ▼
395395+ ┌─────────────────┐ ┌─────────────────┐
396396+ │ voodoo-do │ │odoc_driver_voodoo│
397397+ │ (compile/link) │ │ (all-in-one) │
398398+ └────────┬────────┘ └────────┬────────┘
399399+ │ │
400400+ └───────────────┬───────────────┘
401401+ ▼
402402+ ┌─────────────────┐
403403+ │ voodoo-gen │
404404+ │ (HTML output) │
405405+ └─────────────────┘
406406+```
407407+408408+**day10 uses:** odoc_driver_voodoo (modern unified approach)
409409+**ocaml-docs-ci uses:** Both voodoo-do and odoc_driver_voodoo
410410+411411+**Integration Status:** ✅ Compatible - both can produce compatible output
412412+413413+### OCluster Integration (Optional - Not Required for Parity)
414414+415415+**Note:** Since ocaml-docs-ci runs on a single machine in practice, OCluster integration is **not required** for feature parity. day10's existing fork-based parallelism provides equivalent functionality.
416416+417417+```
418418+Current ocaml-docs-ci reality:
419419+┌─────────────────────────────────────────────────────────────┐
420420+│ OCluster Scheduler │
421421+│ (Single Machine) │
422422+└─────────────────────────┬───────────────────────────────────┘
423423+ │
424424+ ▼
425425+ ┌───────────┐
426426+ │ Worker │ ← All workers on same machine
427427+ │ (linux- │
428428+ │ x86_64) │
429429+ └───────────┘
430430+```
431431+432432+**If future scaling is needed**, day10 could add OCluster:
433433+1. Add `current_ocluster` dependency
434434+2. Generate OBuilder specs from day10 build commands
435435+3. Submit jobs via OCluster API
436436+4. Collect results from worker output
437437+438438+But this is a **future enhancement**, not a migration requirement.
439439+440440+### Solver Service Integration
441441+442442+The solver-service repository provides a standalone solving service:
443443+444444+```
445445+┌──────────────┐ Cap'n Proto ┌────────────────┐
446446+│ day10 │ ─────────────────── │ solver-service │
447447+│ (client) │ solve() │ (server) │
448448+└──────────────┘ └────────────────┘
449449+```
450450+451451+**Current day10:** In-process opam-0install
452452+**Migration option:** Use solver-service for consistency with ecosystem
453453+454454+---
455455+456456+## Implementation Roadmap
457457+458458+### Phase 1: Core Infrastructure (Weeks 1-4)
459459+460460+**Goal:** Establish foundation for docs.ocaml.org integration
461461+462462+| Task | Priority | Effort | Dependencies |
463463+|------|----------|--------|--------------|
464464+| 1.1 Add epoch management | P0 | Medium | None |
465465+| 1.2 Implement valid package list | P0 | Low | None |
466466+| 1.3 Add remote storage sync (SSH/rsync) | P0 | Medium | None |
467467+| 1.4 SQLite state tracking | P1 | Medium | None |
468468+469469+**Deliverable:** day10 can produce epoch-structured output compatible with docs.ocaml.org
470470+471471+### Phase 2: Change Detection (Weeks 5-8)
472472+473473+**Goal:** Automatic rebuilding on opam-repository changes
474474+475475+| Task | Priority | Effort | Dependencies |
476476+|------|----------|--------|--------------|
477477+| 2.1 Git-based opam-repo tracking | P0 | Medium | None |
478478+| 2.2 Change detection algorithm | P0 | High | 2.1 |
479479+| 2.3 Dependency-aware rebuild | P1 | High | 2.2 |
480480+| 2.4 Incremental solution updates | P1 | Medium | 2.2 |
481481+482482+**Deliverable:** day10 can detect and rebuild changed packages automatically
483483+484484+### Phase 3: ~~Distributed Execution~~ Skipped
485485+486486+**Not required:** Since ocaml-docs-ci runs on a single machine in practice, day10's existing fork-based parallelism (`--fork N`) provides equivalent functionality. OCluster integration can be added later if true distribution becomes necessary.
487487+488488+**Time saved:** 6 weeks
489489+490490+### Phase 3 (was 4): Monitoring & API (Weeks 9-12)
491491+492492+**Goal:** Production observability and integration
493493+494494+| Task | Priority | Effort | Dependencies |
495495+|------|----------|--------|--------------|
496496+| 3.1 Cap'n Proto API service | P1 | High | 1.4 |
497497+| 3.2 Status query endpoints | P1 | Medium | 3.1 |
498498+| 3.3 Web dashboard (or static pages) | P2 | Medium | 3.1 |
499499+| 3.4 Health check endpoints | P2 | Low | 3.1 |
500500+501501+**Note:** API/monitoring is lower priority if day10 runs as a batch job (like ocaml-docs-ci in practice).
502502+503503+**Deliverable:** day10 provides status visibility (at minimum via static pages/JSON)
504504+505505+### Phase 4 (was 5): Migration & Cutover (Weeks 13-16)
506506+507507+**Goal:** Replace ocaml-docs-ci in production
508508+509509+| Task | Priority | Effort | Dependencies |
510510+|------|----------|--------|--------------|
511511+| 4.1 Parallel run comparison | P0 | Medium | All above |
512512+| 4.2 Output compatibility validation | P0 | Medium | 4.1 |
513513+| 4.3 Gradual traffic shift | P0 | Low | 4.2 |
514514+| 4.4 Full cutover | P0 | Low | 4.3 |
515515+| 4.5 ocaml-docs-ci deprecation | P2 | Low | 4.4 |
516516+517517+**Deliverable:** day10 is the production system for docs.ocaml.org
518518+519519+### Revised Timeline Summary
520520+521521+| Phase | Original | Revised | Savings |
522522+|-------|----------|---------|---------|
523523+| Core Infrastructure | Weeks 1-4 | Weeks 1-4 | - |
524524+| Change Detection | Weeks 5-8 | Weeks 5-8 | - |
525525+| Distributed Execution | Weeks 9-14 | Skipped | 6 weeks |
526526+| Monitoring & API | Weeks 15-18 | Weeks 9-12 | - |
527527+| Migration | Weeks 19-22 | Weeks 13-16 | - |
528528+| **Total** | **22 weeks** | **16 weeks** | **6 weeks** |
529529+530530+---
531531+532532+## Risk Assessment
533533+534534+### High Risk
535535+536536+| Risk | Probability | Impact | Mitigation |
537537+|------|-------------|--------|------------|
538538+| Output format incompatibility | Low | High | Comprehensive comparison testing |
539539+| Epoch management bugs | Medium | High | Extensive testing, staged rollout |
540540+541541+### Medium Risk
542542+543543+| Risk | Probability | Impact | Mitigation |
544544+|------|-------------|--------|------------|
545545+| Performance regression | Medium | Medium | Benchmark early, optimize iteratively |
546546+| Change detection complexity | Medium | Medium | Start with simple polling approach |
547547+| State tracking gaps | Medium | Medium | Design carefully, review with team |
548548+549549+### Low Risk
550550+551551+| Risk | Probability | Impact | Mitigation |
552552+|------|-------------|--------|------------|
553553+| Voodoo incompatibility | Low | High | Already using same tools |
554554+| Platform regressions | Low | Low | Existing test coverage |
555555+| Parallelism issues | Low | Low | Both systems use single-machine model |
556556+557557+**Note:** OCluster integration risk removed since it's not required for parity.
558558+559559+---
560560+561561+## Recommendations
562562+563563+### Immediate Actions
564564+565565+1. **Validate voodoo compatibility** - Confirm day10 and ocaml-docs-ci produce identical HTML output for the same package
566566+2. **Design epoch system** - Document epoch structure and promotion workflow
567567+3. **Prototype change detection** - Simple git-based tracking of opam-repository changes
568568+569569+### Architecture Decision
570570+571571+**Recommended Approach:** Incremental enhancement of day10
572572+573573+Since both systems run on single machines in practice, day10's architecture is actually well-suited for the task. The migration is simpler than the theoretical architecture comparison suggests.
574574+575575+**Key additions needed:**
576576+1. **Epoch management** - For atomic deployments (similar to ocaml-docs-ci)
577577+2. **Change detection** - Git-based tracking of opam-repository
578578+3. **Valid package list** - For ocaml.org integration
579579+4. **Status reporting** - JSON/static HTML for visibility
580580+581581+**Not needed for parity:**
582582+- OCluster integration (single-machine in practice)
583583+- Full OCurrent reactive pipeline (can use simpler cron/polling)
584584+- Cap'n Proto API (if batch job model is acceptable)
585585+586586+### Simplest Migration Path
587587+588588+Rather than adding OCurrent complexity, consider a simpler operational model:
589589+590590+```bash
591591+# Cron job or systemd timer
592592+while true; do
593593+ git -C /opam-repo pull
594594+ if [ "$(git -C /opam-repo rev-parse HEAD)" != "$(cat /state/last-commit)" ]; then
595595+ day10 batch --cache-dir /cache --opam-repository /opam-repo \
596596+ --html-output /data/html-current @changed-packages.json
597597+ # Atomic promotion
598598+ ln -sfn /data/html-current /data/html-live
599599+ git -C /opam-repo rev-parse HEAD > /state/last-commit
600600+ fi
601601+ sleep 3600
602602+done
603603+```
604604+605605+This provides:
606606+- Automatic change detection
607607+- Incremental rebuilding
608608+- Atomic deployments
609609+- No additional infrastructure
610610+611611+### Alternative: OCurrent Wrapper
612612+613613+If reactive behavior and web UI are required, wrap day10 in OCurrent:
614614+615615+```ocaml
616616+(* Hypothetical OCurrent pipeline using day10 *)
617617+let pipeline =
618618+ let packages = track_opam_repo () in
619619+ let solutions = Current.list_map solve packages in
620620+ let builds = Current.list_map (day10_build ~config) solutions in
621621+ let docs = Current.list_map (day10_docs ~config) builds in
622622+ publish_epoch docs
623623+```
624624+625625+This adds complexity but provides OCurrent's monitoring and caching.
626626+627627+---
628628+629629+## Appendix A: File Structure Comparison
630630+631631+### day10 Output Structure
632632+633633+```
634634+cache_dir/
635635+├── {os_key}/
636636+│ ├── base/fs/
637637+│ ├── build-{hash}/
638638+│ │ ├── fs/
639639+│ │ └── layer.json
640640+│ └── doc-{hash}/
641641+│ ├── fs/
642642+│ │ └── html/
643643+│ │ ├── p/{pkg}/{ver}/
644644+│ │ └── u/{universe}/{pkg}/{ver}/
645645+│ └── layer.json
646646+└── solutions/
647647+ └── {repo-sha}/
648648+ └── {pkg}.json
649649+```
650650+651651+### ocaml-docs-ci Output Structure
652652+653653+```
654654+/data/
655655+├── prep/
656656+│ └── universes/{u}/{pkg}/{ver}/
657657+├── compile/
658658+│ ├── p/{pkg}/{ver}/
659659+│ └── u/{u}/{pkg}/{ver}/
660660+├── linked/
661661+│ ├── p/{pkg}/{ver}/
662662+│ └── u/{u}/{pkg}/{ver}/
663663+├── html-raw/
664664+│ ├── p/{pkg}/{ver}/
665665+│ └── u/{u}/{pkg}/{ver}/
666666+└── epoch-{hash}/
667667+ └── html/
668668+ └── (symlinks to html-raw)
669669+```
670670+671671+---
672672+673673+## Appendix B: Glossary
674674+675675+| Term | Definition |
676676+|------|------------|
677677+| **Epoch** | A versioned collection of documentation artifacts, enabling atomic updates |
678678+| **Blessed** | The canonical/primary documentation version for a package (lives in `p/`) |
679679+| **Universe** | A specific set of package dependencies, identified by hash |
680680+| **Layer** | An overlay2 filesystem layer containing build artifacts |
681681+| **OCluster** | OCaml's distributed build cluster system |
682682+| **OCurrent** | Reactive CI/CD pipeline framework for OCaml |
683683+| **voodoo** | Documentation preparation and generation toolchain |
684684+| **odoc_driver_voodoo** | Unified driver for odoc compilation/linking/generation |
685685+686686+---
687687+688688+## Appendix C: Related Repositories
689689+690690+| Repository | Purpose | URL |
691691+|------------|---------|-----|
692692+| ocaml-docs-ci | Current docs.ocaml.org CI | github.com/ocurrent/ocaml-docs-ci |
693693+| voodoo | Doc preparation tools | github.com/ocaml-doc/voodoo |
694694+| ocluster | Distributed build cluster | github.com/ocurrent/ocluster |
695695+| solver-service | Dependency solving service | github.com/ocurrent/solver-service |
696696+| odoc | Documentation compiler | github.com/ocaml/odoc |
697697+
+357
day10/docs/plans/2026-02-03-fresh-docs-design.md
···11+# Fresh Docs with Graceful Degradation
22+33+**Date:** 2026-02-03
44+**Status:** Proposed
55+**Author:** Brainstorming session
66+77+## Overview
88+99+This document describes the design for day10's documentation generation strategy, which differs fundamentally from ocaml-docs-ci. The key principle is "always fresh, always safe" - docs are rebuilt against the current opam-repository state, but existing working docs are never destroyed by a failed rebuild.
1010+1111+## Background
1212+1313+### The Problem with ocaml-docs-ci
1414+1515+ocaml-docs-ci computes a solution once per package and caches it forever. This causes:
1616+1717+- **Link rot**: Package A's docs link to dependency B v2.0, but B is now at v5.0
1818+- **Stale cross-references**: Over time, docs reference increasingly outdated dependency versions
1919+- **Append-only constraint**: New builds can never overwrite old builds
2020+2121+### day10's Approach
2222+2323+day10 always solves against the current opam-repository state:
2424+2525+- **Fresh cross-references**: Docs always link to current dependency versions
2626+- **Graceful degradation**: Only replace docs when the new build succeeds
2727+- **Fast recovery**: Layer caching means re-runs after fixing issues are fast
2828+2929+## Design
3030+3131+### Core Principle
3232+3333+Every run:
3434+1. Solve all packages against current opam-repository
3535+2. Build all packages (layer cache makes unchanged builds fast)
3636+3. Generate docs where dependency docs succeeded
3737+4. Atomically swap successful docs into place
3838+5. Preserve existing docs on failure
3939+4040+### Two-Level Update Strategy
4141+4242+#### Level 1: Package Swaps (frequent)
4343+4444+For normal operation - individual packages rebuild as dependencies change.
4545+4646+Each package's docs live in a self-contained directory:
4747+```
4848+html/p/{package}/{version}/
4949+```
5050+5151+Update sequence for successful rebuild:
5252+1. Write new docs to `html/p/{package}/{version}.new/`
5353+2. Swap directories:
5454+ ```
5555+ mv html/p/{package}/{version} html/p/{package}/{version}.old
5656+ mv html/p/{package}/{version}.new html/p/{package}/{version}
5757+ ```
5858+3. Remove `.old` directory
5959+6060+If the build fails, no swap occurs - the original directory remains untouched.
6161+6262+**Recovery from interrupted swap:** If the process dies between renames, the next run detects `.new` or `.old` directories and cleans up before proceeding.
6363+6464+#### Level 2: Epoch Transitions (rare)
6565+6666+For major structural changes:
6767+- New odoc version with different HTML output format
6868+- URL scheme changes
6969+- Full rebuild from scratch
7070+7171+Epoch mechanism:
7272+```
7373+/data/
7474+├── epoch-abc123/ ← currently live
7575+│ └── html/p/...
7676+├── epoch-def456/ ← being built
7777+│ └── html/p/...
7878+└── html-live -> epoch-abc123/html ← symlink
7979+```
8080+8181+During epoch transition:
8282+1. Old epoch continues serving traffic
8383+2. New epoch builds completely in parallel
8484+3. Atomically switch the `html-live` symlink when ready
8585+4. Keep old epoch briefly for rollback, then garbage collect
8686+8787+### Pipeline Structure
8888+8989+The pipeline has two independent phases with different dependency rules:
9090+9191+| Phase | Depends On | Blocked By |
9292+|-------|------------|------------|
9393+| **Build** | Dependency *builds* | Dependency build failure |
9494+| **Docs** | Package build + dependency *docs* | Build failure OR dependency docs failure |
9595+9696+#### Failure Propagation Example
9797+9898+```
9999+ocaml-base-compiler build: ✓
100100+ocaml-base-compiler docs: ✗ (odoc bug)
101101+ │
102102+ ├─► astring build: ✓ (proceeds - only needs build artifacts)
103103+ │ astring docs: ⊘ (skipped - dependency docs missing)
104104+ │ │
105105+ │ └─► yaml build: ✓ (proceeds)
106106+ │ yaml docs: ⊘ (skipped - transitive docs failure)
107107+ │
108108+ └─► fmt build: ✓
109109+ fmt docs: ⊘ (skipped)
110110+```
111111+112112+#### Benefits
113113+114114+1. **Fast recovery** - When odoc is fixed, all builds are cache hits; only docs regenerate
115115+2. **Complete build reporting** - Get build status and logs for all packages
116116+3. **Isolated blast radius** - Docs-only problems don't block builds
117117+4. **Better diagnostics** - Clear distinction between "build failed" vs "docs skipped"
118118+119119+#### Status Values
120120+121121+Each package reports one of:
122122+- `build: success, docs: success` - Fully working
123123+- `build: success, docs: failed` - Build ok, docs generation failed
124124+- `build: success, docs: skipped` - Build ok, docs skipped (dependency docs missing)
125125+- `build: failed, docs: skipped` - Build failed, docs not attempted
126126+127127+### Error Handling
128128+129129+#### Principle: Fail Fast, Fail Clearly
130130+131131+Any error within a layer causes the entire layer to fail. No partial successes.
132132+133133+#### Retry Within Run
134134+135135+Before marking a layer as failed, retry with exponential backoff:
136136+137137+```
138138+Attempt 1: immediate
139139+Attempt 2: wait 5s
140140+Attempt 3: wait 15s
141141+→ Give up, mark failed
142142+```
143143+144144+This handles transient failures without waiting for the next run.
145145+146146+#### What Counts as Failure
147147+148148+- Non-zero exit code from build/odoc
149149+- Timeout exceeded
150150+- OOM killed
151151+- Any exception during layer creation
152152+153153+### Operational Model
154154+155155+#### Triggering
156156+157157+**Primary: Webhook on opam-repository push**
158158+159159+A lightweight HTTP endpoint receives GitHub webhook:
160160+```
161161+POST /webhook/opam-repository
162162+ → Validate signature
163163+ → Trigger day10 run (async)
164164+ → Queue if run already in progress
165165+```
166166+167167+**Fallback: Daily cron**
168168+```
169169+0 4 * * * flock -n /var/run/day10.lock day10 batch ...
170170+```
171171+172172+#### Run Sequence
173173+174174+1. Pull latest opam-repository
175175+2. Solve all target packages against current state
176176+3. Build all packages (layer cache = fast for unchanged)
177177+4. Generate docs where dependency docs succeeded
178178+5. Atomic swap successful docs, preserve old on failure
179179+180180+### Notifications
181181+182182+On run completion with failures, post to Zulip:
183183+184184+```
185185+📦 day10 run completed
186186+187187+✓ 3,542 packages built
188188+✓ 3,201 docs generated
189189+✗ 12 build failures
190190+✗ 8 doc failures (23 skipped due to dependencies)
191191+192192+Failed builds:
193193+ - some-package.1.2.3: exit code 2
194194+ - another-pkg.0.5.0: timeout after 600s
195195+196196+Failed docs:
197197+ - broken-docs.1.0.0: odoc error
198198+199199+Full logs: /var/log/day10/runs/2026-02-03-1234/
200200+```
201201+202202+### Log Retention
203203+204204+All logs kept permanently:
205205+206206+```
207207+/var/log/day10/
208208+├── runs/
209209+│ └── 2026-02-03-1234/
210210+│ ├── summary.json
211211+│ ├── build/
212212+│ │ ├── some-package.1.2.3.log
213213+│ │ └── another-pkg.0.5.0.log
214214+│ └── docs/
215215+│ └── broken-docs.1.0.0.log
216216+└── latest -> runs/2026-02-03-1234
217217+```
218218+219219+Logs include stdout, stderr, exit code, timing, and retry attempts.
220220+221221+### Garbage Collection
222222+223223+GC runs after each successful batch run to clean up stale artifacts.
224224+225225+#### Layer GC (Aggressive)
226226+227227+Layers in the cache directory become stale when packages update (new opam file → new layer hash). Clean up aggressively since regeneration is fast.
228228+229229+After each run:
230230+1. Collect all layer hashes referenced by current solutions
231231+2. List all layers in cache directory
232232+3. Delete any layer not in the referenced set
233233+234234+```ocaml
235235+let gc_layers ~cache_dir ~current_solutions =
236236+ let referenced =
237237+ current_solutions
238238+ |> List.concat_map (fun sol -> sol.layer_hashes)
239239+ |> String.Set.of_list
240240+ in
241241+ let all_layers = Sys.readdir (cache_dir / "layers") in
242242+ Array.iter (fun layer ->
243243+ if not (String.Set.mem layer referenced) then
244244+ rm_rf (cache_dir / "layers" / layer)
245245+ ) all_layers
246246+```
247247+248248+#### Universe GC (Preserve Until Replaced)
249249+250250+Universe directories (`html/u/{universe-hash}/...`) contain docs for specific dependency combinations. A universe stays alive as long as at least one blessed package references it.
251251+252252+**Universe references stored with package docs:**
253253+254254+Each blessed package's docs directory includes a `universes.json` listing which universes it references:
255255+256256+```
257257+html/p/{package}/{version}/
258258+├── index.html
259259+├── Pkg_module/index.html
260260+└── universes.json # {"universes": ["abc123", "def456"]}
261261+```
262262+263263+This file is written during doc generation and moves atomically with the docs (same `.new`/`.old` swap). If a rebuild fails, the old `universes.json` stays in place, keeping old universe references alive.
264264+265265+After each run:
266266+1. Scan all `html/p/*/*/universes.json` files
267267+2. Collect all referenced universe hashes
268268+3. Delete any universe directory not referenced by any blessed package
269269+270270+```ocaml
271271+let gc_universes ~html_dir =
272272+ (* Collect all universe refs from all blessed packages *)
273273+ let referenced =
274274+ Glob.find (html_dir / "p" / "*" / "*" / "universes.json")
275275+ |> List.concat_map (fun path ->
276276+ let json = Yojson.Safe.from_file path in
277277+ Yojson.Safe.Util.(json |> member "universes" |> to_list |> List.map to_string)
278278+ )
279279+ |> String.Set.of_list
280280+ in
281281+282282+ (* Delete unreferenced universes *)
283283+ Sys.readdir (html_dir / "u")
284284+ |> Array.iter (fun hash ->
285285+ if not (String.Set.mem hash referenced) then
286286+ rm_rf (html_dir / "u" / hash)
287287+ )
288288+```
289289+290290+Benefits:
291291+- Universe refs move atomically with the docs (same swap mechanism)
292292+- Failed rebuild keeps old `universes.json`, so old universes stay alive
293293+- No separate manifest that could get out of sync
294294+- Truth derived from actual docs structure
295295+296296+## Implementation Status
297297+298298+### day10 Core
299299+300300+1. **Staging directory support** ✅ IMPLEMENTED
301301+ - Write docs to staging temp directory during generation
302302+ - Atomic swap on success using mv operations
303303+ - Clean up `.new` and `.old` artifacts on batch startup
304304+ - Commits: 7790e74, 4dc8bf4
305305+306306+2. **Failure preservation** ✅ IMPLEMENTED
307307+ - If build/docs fail, existing output is preserved (graceful degradation)
308308+ - Logging indicates "kept old docs" vs "atomic swap: successfully committed"
309309+ - Commit: 7790e74
310310+311311+3. **Garbage collection** ✅ IMPLEMENTED
312312+ - Layer GC: Deletes unreferenced build/doc layers after batch run
313313+ - Universe GC: Deletes unreferenced universe directories
314314+ - universes.json written during doc generation for GC tracking
315315+ - Commits: bc6cfde, 1bc4d5e, b02c30a
316316+317317+4. **Epoch awareness** ⏳ NOT YET IMPLEMENTED
318318+ - New `--epoch` flag to specify epoch directory
319319+ - New `promote-epoch` command for symlink switch
320320+321321+5. **Build/docs phase separation** ⏳ NOT YET IMPLEMENTED
322322+ - Track build success independently from docs success
323323+ - Continue builds even when dependency docs fail
324324+ - Skip docs only when dependency docs missing
325325+326326+### New Components
327327+328328+1. **Webhook handler** ⏳ NOT YET IMPLEMENTED - Small HTTP service to receive GitHub webhooks
329329+2. **Zulip notifier** ⏳ NOT YET IMPLEMENTED - Integration with ocaml-zulip library
330330+ - Note: ocaml-zulip and dependencies not in opam-repository
331331+ - Use custom opam repo: https://tangled.org/anil.recoil.org/aoah-opam-repo
332332+3. **Log management** ✅ IMPLEMENTED - Structured logging with run directories
333333+ - Run directories: runs/{YYYY-MM-DD-HHMMSS}/
334334+ - summary.json with statistics and failures
335335+ - Build/doc logs symlinked into run directories
336336+ - 'latest' symlink for easy access
337337+ - Commit: 05d396b
338338+339339+## Comparison to ocaml-docs-ci
340340+341341+| Aspect | ocaml-docs-ci | day10 (this design) |
342342+|--------|---------------|---------------------|
343343+| Solutions | Cached forever | Fresh every run |
344344+| Cross-references | Drift over time | Always current |
345345+| On doc failure | Blocks dependent builds | Builds continue, only docs skip |
346346+| Update mechanism | Append-only | Atomic swap on success |
347347+| Infrastructure | OCurrent + OCluster | day10 + webhook + cron |
348348+| Recovery | Complex rebuild process | Re-run (layer cache hits) |
349349+| Notifications | OCurrent web UI | Zulip |
350350+351351+## Open Questions
352352+353353+None at this time.
354354+355355+## References
356356+357357+- [Gap Analysis: day10 vs ocaml-docs-ci](/workspace/docs/GAP_ANALYSIS.md)
···11+# day10-web: Status Dashboard Design
22+33+**Date:** 2026-02-04
44+**Status:** Approved
55+**Author:** Brainstorming session
66+77+## Overview
88+99+A web frontend for day10 that allows package maintainers to check their package status and operators to monitor system health. It runs as a separate service that reads day10's output directories.
1010+1111+## Audience
1212+1313+1. **Package maintainers** - Want to see if their packages are building/documented correctly, investigate failures
1414+2. **day10 operators/admins** - Monitoring system health, viewing logs, managing runs
1515+1616+Not intended as a general documentation browser (that's what the generated HTML at `/docs/` is for).
1717+1818+## Architecture
1919+2020+```
2121+┌─────────────┐ writes ┌──────────────────────────┐
2222+│ day10 │ ───────────────►│ /data/ │
2323+│ (batch) │ │ ├── cache/logs/ │
2424+└─────────────┘ │ │ ├── runs/ │
2525+ │ │ │ └── summary.json│
2626+ │ │ └── latest │
2727+ │ └── html/ │
2828+┌─────────────┐ reads │ └── p/{pkg}/{ver}/ │
2929+│ day10-web │ ◄───────────────┤ │
3030+│ (Dream) │ └──────────────────────────┘
3131+└─────────────┘
3232+ │
3333+ ▼
3434+ HTTP :8080
3535+```
3636+3737+**Key properties:**
3838+- No database - all state derived from filesystem
3939+- Read-only access to day10's directories
4040+- Single configuration: paths to cache-dir and html-dir
4141+- Lightweight: `day10-web --cache-dir /data/cache --html-dir /data/html`
4242+4343+## Pages and Routes
4444+4545+### Dashboard (`/`)
4646+4747+- Overview cards: total packages, build success rate, doc success rate
4848+- Latest run summary (timestamp, duration, pass/fail counts)
4949+- Link to full run history
5050+5151+### Package List (`/packages`)
5252+5353+- Searchable/filterable table of all packages
5454+- Columns: package name, version, build status, doc status, last updated
5555+- Click through to package detail
5656+5757+### Package Detail (`/packages/{name}/{version}`)
5858+5959+- Build status with link to build log
6060+- Doc status with link to doc log and generated docs
6161+- Dependencies tab: what this package depends on (with their statuses)
6262+- Reverse dependencies tab: what depends on this package
6363+- Solver solution: OCaml version, full dependency list with versions
6464+6565+### Run History (`/runs`)
6666+6767+- List of all batch runs (timestamp, duration, success/fail counts)
6868+- Click through to run detail
6969+7070+### Run Detail (`/runs/{run-id}`)
7171+7272+- Full summary.json data displayed nicely
7373+- List of failures with links to logs
7474+- Filterable list of all packages processed in that run
7575+7676+## Data Sources
7777+7878+All data is read from the filesystem:
7979+8080+### Run data (`{cache-dir}/logs/`)
8181+8282+| Path | Provides |
8383+|------|----------|
8484+| `runs/` directory listing | Run history |
8585+| `runs/{id}/summary.json` | Run statistics, failure list |
8686+| `runs/{id}/build/*.log` | Build logs |
8787+| `runs/{id}/docs/*.log` | Doc generation logs |
8888+| `latest` symlink | Most recent run |
8989+9090+### Package data (`{cache-dir}/{platform}/`)
9191+9292+| Path | Provides |
9393+|------|----------|
9494+| `solutions/` | Cached solver results (deps, OCaml version) |
9595+| `build-*/layer.json` | Build metadata and status |
9696+| `doc-*/layer.json` | Doc generation metadata and status |
9797+9898+### Generated docs (`{html-dir}/`)
9999+100100+| Path | Provides |
101101+|------|----------|
102102+| `p/{pkg}/{ver}/` existence | Doc generation succeeded |
103103+| Direct links | Link to generated documentation |
104104+105105+### Dependency graph
106106+107107+- Built from solutions data
108108+- Forward deps: parse the solution for a package
109109+- Reverse deps: scan all solutions (indexed on startup)
110110+111111+## UI Approach
112112+113113+### Rendering: Server-side HTML with minimal JS
114114+115115+Dream renders HTML directly using its built-in HTML DSL or Tyxml. No heavy frontend framework:
116116+117117+- HTML pages rendered on server
118118+- Small amount of vanilla JS for search/filtering
119119+- CSS styling (Pico CSS or simple custom styles)
120120+121121+### Why this approach
122122+123123+- Simpler to build and maintain
124124+- No frontend build pipeline
125125+- Fast initial page loads
126126+- Works without JavaScript for core functionality
127127+- Fits "operational dashboard" use case
128128+129129+### Visual style
130130+131131+- Clean, functional dashboard aesthetic
132132+- Status badges: green (success), red (failed), yellow (skipped)
133133+- Sortable tables for package lists
134134+- Collapsible sections for dependency trees
135135+- Syntax highlighting for logs (highlight.js)
136136+137137+### Log viewer
138138+139139+- Display logs inline with scrolling
140140+- Link to raw log file for download
141141+- Client-side search within log
142142+143143+## Project Structure
144144+145145+```
146146+/workspace/
147147+├── day10.opam # Existing - the batch runner
148148+├── day10-web.opam # New - the web frontend
149149+├── bin/
150150+│ └── main.ml # Existing day10 CLI
151151+├── lib/ # Existing day10_lib
152152+├── web/
153153+│ ├── dune
154154+│ ├── main.ml # day10-web entry point
155155+│ ├── server.ml # Dream routes and handlers
156156+│ ├── views/
157157+│ │ ├── layout.ml # Common HTML layout
158158+│ │ ├── dashboard.ml # Dashboard page
159159+│ │ ├── packages.ml # Package list and detail pages
160160+│ │ └── runs.ml # Run history and detail pages
161161+│ ├── data/
162162+│ │ ├── run_data.ml # Read summary.json, logs
163163+│ │ ├── package_data.ml # Read solutions, layer metadata
164164+│ │ └── deps.ml # Dependency graph builder
165165+│ └── static/
166166+│ ├── style.css
167167+│ └── app.js # Minimal JS for search/filter
168168+└── dune-project # Update to add day10-web package
169169+```
170170+171171+**Shared code:** `day10-web` depends on `day10_lib` to reuse types (e.g., `Run_log.summary`).
172172+173173+## CLI and Configuration
174174+175175+```
176176+day10-web [OPTIONS]
177177+178178+Required:
179179+ --cache-dir DIR Path to day10's cache directory
180180+ --html-dir DIR Path to generated documentation
181181+182182+Optional:
183183+ --port PORT HTTP port (default: 8080)
184184+ --host HOST Bind address (default: 127.0.0.1)
185185+ --platform PLATFORM Platform subdirectory (default: debian-12-x86_64)
186186+```
187187+188188+### Example usage
189189+190190+```bash
191191+# Development
192192+day10-web --cache-dir /data/cache --html-dir /data/html
193193+194194+# Production (bind to all interfaces)
195195+day10-web --cache-dir /data/cache --html-dir /data/html \
196196+ --host 0.0.0.0 --port 80
197197+```
198198+199199+### Deployment with nginx
200200+201201+```nginx
202202+server {
203203+ listen 80;
204204+ server_name docs.example.com;
205205+206206+ # Status dashboard
207207+ location / {
208208+ proxy_pass http://127.0.0.1:8080;
209209+ }
210210+211211+ # Generated documentation
212212+ location /docs/ {
213213+ alias /data/html/;
214214+ autoindex on;
215215+ }
216216+}
217217+```
218218+219219+## Error Handling
220220+221221+### Missing data
222222+223223+| Condition | Behavior |
224224+|-----------|----------|
225225+| No runs yet | Dashboard shows "No runs recorded" |
226226+| Package not found | 404 with search suggestions |
227227+| Run ID not found | 404 with link to run history |
228228+| Log file missing | "Log not available" (may be GC'd) |
229229+| Malformed JSON | Log warning, show partial data |
230230+231231+### Large data sets
232232+233233+| Data | Strategy |
234234+|------|----------|
235235+| Package list | Paginated (50/page) with search |
236236+| Run history | Paginated (20/page), most recent first |
237237+| Dependency tree | Depth-limited (2 levels), click to expand |
238238+| Reverse deps | Count with paginated list |
239239+240240+### Concurrent access
241241+242242+- Read-only filesystem access is safe
243243+- Atomic swaps mean readers see consistent state
244244+- No locking needed
245245+246246+### Startup
247247+248248+- Validate cache-dir and html-dir exist
249249+- Build reverse dependency index
250250+- Log startup time and index size
251251+252252+## Out of Scope (YAGNI)
253253+254254+- Real-time updates / WebSockets
255255+- Authentication / access control
256256+- Write operations (triggering builds)
257257+- REST API (just HTML pages for now)
258258+259259+## Dependencies
260260+261261+New opam dependencies for day10-web:
262262+- `dream` - Web framework
263263+- `tyxml` or Dream's HTML DSL - HTML generation
264264+265265+## Implementation Plan
266266+267267+1. Set up project structure (dune-project, day10-web.opam, web/ directory)
268268+2. Implement data layer (run_data.ml, package_data.ml, deps.ml)
269269+3. Implement views (layout, dashboard, packages, runs)
270270+4. Wire up Dream routes in server.ml
271271+5. Add static assets (CSS, minimal JS)
272272+6. Add CLI with cmdliner
273273+7. Update admin guide with deployment instructions
274274+8. Write tests for data layer
+1669
day10/docs/plans/2026-02-04-web-frontend-impl.md
···11+# day10-web Implementation Plan
22+33+> **For Claude:** REQUIRED SUB-SKILL: Use superpowers:executing-plans to implement this plan task-by-task.
44+55+**Goal:** Build a status dashboard web frontend for day10 using OCaml and Dream.
66+77+**Architecture:** Separate service (`day10-web`) that reads day10's cache/html directories and serves HTML pages. Server-side rendering with Dream, no database, minimal JavaScript.
88+99+**Tech Stack:** OCaml 5.3+, Dream (web framework), Tyxml (HTML generation), cmdliner (CLI)
1010+1111+---
1212+1313+## Task 1: Project Setup
1414+1515+**Files:**
1616+- Modify: `/workspace/dune-project`
1717+- Create: `/workspace/web/dune`
1818+- Create: `/workspace/web/main.ml`
1919+2020+**Step 1: Add day10-web package to dune-project**
2121+2222+Edit `/workspace/dune-project` to add after the existing `(package ...)` stanza:
2323+2424+```dune
2525+(package
2626+ (name day10-web)
2727+ (synopsis "Web dashboard for day10 documentation status")
2828+ (description "Status dashboard for package maintainers and operators")
2929+ (depends
3030+ (ocaml (>= 5.3.0))
3131+ dune
3232+ dream
3333+ day10
3434+ cmdliner))
3535+```
3636+3737+**Step 2: Create web/dune file**
3838+3939+Create `/workspace/web/dune`:
4040+4141+```dune
4242+(executable
4343+ (name main)
4444+ (public_name day10-web)
4545+ (package day10-web)
4646+ (libraries dream day10_lib cmdliner unix yojson))
4747+```
4848+4949+**Step 3: Create minimal web/main.ml**
5050+5151+Create `/workspace/web/main.ml`:
5252+5353+```ocaml
5454+let () =
5555+ Dream.run
5656+ @@ Dream.logger
5757+ @@ Dream.router [
5858+ Dream.get "/" (fun _ -> Dream.html "<h1>day10-web</h1>");
5959+ ]
6060+```
6161+6262+**Step 4: Build to verify setup**
6363+6464+Run: `dune build`
6565+Expected: Builds successfully with no errors
6666+6767+**Step 5: Test the server starts**
6868+6969+Run: `dune exec web/main.exe & sleep 2; curl http://localhost:8080; kill %1`
7070+Expected: Returns `<h1>day10-web</h1>`
7171+7272+**Step 6: Commit**
7373+7474+```bash
7575+git add dune-project web/
7676+git commit -m "feat(web): initial project setup with Dream"
7777+```
7878+7979+---
8080+8181+## Task 2: CLI with cmdliner
8282+8383+**Files:**
8484+- Modify: `/workspace/web/main.ml`
8585+8686+**Step 1: Add cmdliner CLI**
8787+8888+Replace `/workspace/web/main.ml` with:
8989+9090+```ocaml
9191+open Cmdliner
9292+9393+let cache_dir =
9494+ let doc = "Path to day10's cache directory" in
9595+ Arg.(required & opt (some dir) None & info ["cache-dir"] ~docv:"DIR" ~doc)
9696+9797+let html_dir =
9898+ let doc = "Path to generated documentation directory" in
9999+ Arg.(required & opt (some dir) None & info ["html-dir"] ~docv:"DIR" ~doc)
100100+101101+let port =
102102+ let doc = "HTTP port to listen on" in
103103+ Arg.(value & opt int 8080 & info ["port"; "p"] ~docv:"PORT" ~doc)
104104+105105+let host =
106106+ let doc = "Host address to bind to" in
107107+ Arg.(value & opt string "127.0.0.1" & info ["host"] ~docv:"HOST" ~doc)
108108+109109+let platform =
110110+ let doc = "Platform subdirectory in cache" in
111111+ Arg.(value & opt string "debian-12-x86_64" & info ["platform"] ~docv:"PLATFORM" ~doc)
112112+113113+type config = {
114114+ cache_dir : string;
115115+ html_dir : string;
116116+ port : int;
117117+ host : string;
118118+ platform : string;
119119+}
120120+121121+let run_server config =
122122+ Dream.run ~port:config.port ~interface:config.host
123123+ @@ Dream.logger
124124+ @@ Dream.router [
125125+ Dream.get "/" (fun _ -> Dream.html "<h1>day10-web</h1>");
126126+ ]
127127+128128+let main cache_dir html_dir port host platform =
129129+ let config = { cache_dir; html_dir; port; host; platform } in
130130+ run_server config
131131+132132+let cmd =
133133+ let doc = "Web dashboard for day10 documentation status" in
134134+ let info = Cmd.info "day10-web" ~version:"0.1.0" ~doc in
135135+ Cmd.v info Term.(const main $ cache_dir $ html_dir $ port $ host $ platform)
136136+137137+let () = exit (Cmd.eval cmd)
138138+```
139139+140140+**Step 2: Build and test help**
141141+142142+Run: `dune build && dune exec -- day10-web --help`
143143+Expected: Shows help with --cache-dir, --html-dir, --port, --host, --platform options
144144+145145+**Step 3: Commit**
146146+147147+```bash
148148+git add web/main.ml
149149+git commit -m "feat(web): add cmdliner CLI"
150150+```
151151+152152+---
153153+154154+## Task 3: Data Layer - Run Data
155155+156156+**Files:**
157157+- Create: `/workspace/web/data/dune`
158158+- Create: `/workspace/web/data/run_data.ml`
159159+- Create: `/workspace/web/data/run_data.mli`
160160+- Create: `/workspace/tests/unit/test_run_data.ml`
161161+- Modify: `/workspace/tests/unit/dune`
162162+163163+**Step 1: Create web/data/dune**
164164+165165+Create `/workspace/web/data/dune`:
166166+167167+```dune
168168+(library
169169+ (name day10_web_data)
170170+ (libraries unix yojson day10_lib))
171171+```
172172+173173+**Step 2: Write the failing test**
174174+175175+Create `/workspace/tests/unit/test_run_data.ml`:
176176+177177+```ocaml
178178+(** Unit tests for run data reading *)
179179+180180+let test_dir = ref ""
181181+182182+let setup () =
183183+ let dir = Filename.temp_dir "test-run-data-" "" in
184184+ test_dir := dir;
185185+ dir
186186+187187+let teardown () =
188188+ if !test_dir <> "" then begin
189189+ ignore (Sys.command (Printf.sprintf "rm -rf %s" !test_dir));
190190+ test_dir := ""
191191+ end
192192+193193+let mkdir_p path =
194194+ let rec create dir =
195195+ if not (Sys.file_exists dir) then begin
196196+ create (Filename.dirname dir);
197197+ try Unix.mkdir dir 0o755 with Unix.Unix_error (Unix.EEXIST, _, _) -> ()
198198+ end
199199+ in
200200+ create path
201201+202202+let write_file path content =
203203+ let dir = Filename.dirname path in
204204+ mkdir_p dir;
205205+ Out_channel.with_open_text path (fun oc -> Out_channel.output_string oc content)
206206+207207+(** Test: list_runs returns runs sorted by most recent first *)
208208+let test_list_runs () =
209209+ let base_dir = setup () in
210210+ let runs_dir = Filename.concat base_dir "runs" in
211211+ mkdir_p (Filename.concat runs_dir "2026-02-01-120000");
212212+ mkdir_p (Filename.concat runs_dir "2026-02-03-120000");
213213+ mkdir_p (Filename.concat runs_dir "2026-02-02-120000");
214214+215215+ let runs = Day10_web_data.Run_data.list_runs ~log_dir:base_dir in
216216+ assert (List.length runs = 3);
217217+ assert (List.hd runs = "2026-02-03-120000");
218218+219219+ teardown ();
220220+ Printf.printf "PASS: test_list_runs\n%!"
221221+222222+(** Test: read_summary parses summary.json *)
223223+let test_read_summary () =
224224+ let base_dir = setup () in
225225+ let run_dir = Filename.concat (Filename.concat base_dir "runs") "2026-02-04-120000" in
226226+ mkdir_p run_dir;
227227+ write_file (Filename.concat run_dir "summary.json") {|{
228228+ "run_id": "2026-02-04-120000",
229229+ "start_time": "2026-02-04T12:00:00",
230230+ "end_time": "2026-02-04T12:30:00",
231231+ "duration_seconds": 1800.0,
232232+ "targets_requested": 100,
233233+ "solutions_found": 95,
234234+ "build_success": 90,
235235+ "build_failed": 5,
236236+ "doc_success": 80,
237237+ "doc_failed": 5,
238238+ "doc_skipped": 5,
239239+ "failures": [{"package": "bad.1.0", "error": "build failed"}]
240240+ }|};
241241+242242+ let summary = Day10_web_data.Run_data.read_summary ~log_dir:base_dir ~run_id:"2026-02-04-120000" in
243243+ assert (Option.is_some summary);
244244+ let s = Option.get summary in
245245+ assert (s.run_id = "2026-02-04-120000");
246246+ assert (s.build_success = 90);
247247+ assert (List.length s.failures = 1);
248248+249249+ teardown ();
250250+ Printf.printf "PASS: test_read_summary\n%!"
251251+252252+(** Test: read_summary returns None for missing run *)
253253+let test_read_summary_missing () =
254254+ let base_dir = setup () in
255255+ let summary = Day10_web_data.Run_data.read_summary ~log_dir:base_dir ~run_id:"nonexistent" in
256256+ assert (Option.is_none summary);
257257+ teardown ();
258258+ Printf.printf "PASS: test_read_summary_missing\n%!"
259259+260260+(** Test: get_latest_run_id follows symlink *)
261261+let test_get_latest_run_id () =
262262+ let base_dir = setup () in
263263+ let runs_dir = Filename.concat base_dir "runs" in
264264+ mkdir_p (Filename.concat runs_dir "2026-02-04-120000");
265265+ let latest = Filename.concat base_dir "latest" in
266266+ Unix.symlink "runs/2026-02-04-120000" latest;
267267+268268+ let latest_id = Day10_web_data.Run_data.get_latest_run_id ~log_dir:base_dir in
269269+ assert (Option.is_some latest_id);
270270+ assert (Option.get latest_id = "2026-02-04-120000");
271271+272272+ teardown ();
273273+ Printf.printf "PASS: test_get_latest_run_id\n%!"
274274+275275+(** Test: read_log returns log content *)
276276+let test_read_log () =
277277+ let base_dir = setup () in
278278+ let run_dir = Filename.concat (Filename.concat base_dir "runs") "2026-02-04-120000" in
279279+ write_file (Filename.concat (Filename.concat run_dir "build") "test-pkg.1.0.log")
280280+ "Build output here\n";
281281+282282+ let content = Day10_web_data.Run_data.read_build_log
283283+ ~log_dir:base_dir ~run_id:"2026-02-04-120000" ~package:"test-pkg.1.0" in
284284+ assert (Option.is_some content);
285285+ assert (String.trim (Option.get content) = "Build output here");
286286+287287+ teardown ();
288288+ Printf.printf "PASS: test_read_log\n%!"
289289+290290+let () =
291291+ Printf.printf "Running Run_data tests...\n%!";
292292+ test_list_runs ();
293293+ test_read_summary ();
294294+ test_read_summary_missing ();
295295+ test_get_latest_run_id ();
296296+ test_read_log ();
297297+ Printf.printf "\nAll Run_data tests passed!\n%!"
298298+```
299299+300300+**Step 3: Add test to tests/unit/dune**
301301+302302+Add to `/workspace/tests/unit/dune`:
303303+304304+```dune
305305+(executable
306306+ (name test_run_data)
307307+ (libraries day10_web_data unix yojson))
308308+```
309309+310310+**Step 4: Run test to verify it fails**
311311+312312+Run: `dune build tests/unit/test_run_data.exe 2>&1`
313313+Expected: FAIL with "Unbound module Day10_web_data"
314314+315315+**Step 5: Create run_data.mli interface**
316316+317317+Create `/workspace/web/data/run_data.mli`:
318318+319319+```ocaml
320320+(** Read run data from day10's log directory *)
321321+322322+(** List all run IDs, most recent first *)
323323+val list_runs : log_dir:string -> string list
324324+325325+(** Get the latest run ID from the 'latest' symlink *)
326326+val get_latest_run_id : log_dir:string -> string option
327327+328328+(** Read summary.json for a run *)
329329+val read_summary : log_dir:string -> run_id:string -> Day10_lib.Run_log.summary option
330330+331331+(** Read a build log file *)
332332+val read_build_log : log_dir:string -> run_id:string -> package:string -> string option
333333+334334+(** Read a doc log file *)
335335+val read_doc_log : log_dir:string -> run_id:string -> package:string -> string option
336336+337337+(** List all build logs in a run *)
338338+val list_build_logs : log_dir:string -> run_id:string -> string list
339339+340340+(** List all doc logs in a run *)
341341+val list_doc_logs : log_dir:string -> run_id:string -> string list
342342+```
343343+344344+**Step 6: Implement run_data.ml**
345345+346346+Create `/workspace/web/data/run_data.ml`:
347347+348348+```ocaml
349349+(** Read run data from day10's log directory *)
350350+351351+let list_runs ~log_dir =
352352+ let runs_dir = Filename.concat log_dir "runs" in
353353+ if Sys.file_exists runs_dir && Sys.is_directory runs_dir then
354354+ Sys.readdir runs_dir
355355+ |> Array.to_list
356356+ |> List.filter (fun name ->
357357+ let path = Filename.concat runs_dir name in
358358+ Sys.is_directory path)
359359+ |> List.sort (fun a b -> String.compare b a) (* Descending *)
360360+ else
361361+ []
362362+363363+let get_latest_run_id ~log_dir =
364364+ let latest = Filename.concat log_dir "latest" in
365365+ if Sys.file_exists latest then
366366+ try
367367+ let target = Unix.readlink latest in
368368+ (* Target is like "runs/2026-02-04-120000" *)
369369+ Some (Filename.basename target)
370370+ with Unix.Unix_error _ -> None
371371+ else
372372+ None
373373+374374+let read_summary ~log_dir ~run_id =
375375+ let path = Filename.concat log_dir
376376+ (Filename.concat "runs" (Filename.concat run_id "summary.json")) in
377377+ if Sys.file_exists path then
378378+ try
379379+ let content = In_channel.with_open_text path In_channel.input_all in
380380+ let json = Yojson.Safe.from_string content in
381381+ let open Yojson.Safe.Util in
382382+ let failures =
383383+ json |> member "failures" |> to_list
384384+ |> List.map (fun f ->
385385+ (f |> member "package" |> to_string,
386386+ f |> member "error" |> to_string))
387387+ in
388388+ Some {
389389+ Day10_lib.Run_log.run_id = json |> member "run_id" |> to_string;
390390+ start_time = json |> member "start_time" |> to_string;
391391+ end_time = json |> member "end_time" |> to_string;
392392+ duration_seconds = json |> member "duration_seconds" |> to_float;
393393+ targets_requested = json |> member "targets_requested" |> to_int;
394394+ solutions_found = json |> member "solutions_found" |> to_int;
395395+ build_success = json |> member "build_success" |> to_int;
396396+ build_failed = json |> member "build_failed" |> to_int;
397397+ doc_success = json |> member "doc_success" |> to_int;
398398+ doc_failed = json |> member "doc_failed" |> to_int;
399399+ doc_skipped = json |> member "doc_skipped" |> to_int;
400400+ failures;
401401+ }
402402+ with _ -> None
403403+ else
404404+ None
405405+406406+let read_log_file path =
407407+ if Sys.file_exists path then
408408+ try Some (In_channel.with_open_text path In_channel.input_all)
409409+ with _ -> None
410410+ else
411411+ None
412412+413413+let read_build_log ~log_dir ~run_id ~package =
414414+ let path = Filename.concat log_dir
415415+ (Filename.concat "runs"
416416+ (Filename.concat run_id
417417+ (Filename.concat "build" (package ^ ".log")))) in
418418+ read_log_file path
419419+420420+let read_doc_log ~log_dir ~run_id ~package =
421421+ let path = Filename.concat log_dir
422422+ (Filename.concat "runs"
423423+ (Filename.concat run_id
424424+ (Filename.concat "docs" (package ^ ".log")))) in
425425+ read_log_file path
426426+427427+let list_logs_in_dir dir =
428428+ if Sys.file_exists dir && Sys.is_directory dir then
429429+ Sys.readdir dir
430430+ |> Array.to_list
431431+ |> List.filter (fun name -> Filename.check_suffix name ".log")
432432+ |> List.map (fun name -> Filename.chop_suffix name ".log")
433433+ |> List.sort String.compare
434434+ else
435435+ []
436436+437437+let list_build_logs ~log_dir ~run_id =
438438+ let dir = Filename.concat log_dir
439439+ (Filename.concat "runs" (Filename.concat run_id "build")) in
440440+ list_logs_in_dir dir
441441+442442+let list_doc_logs ~log_dir ~run_id =
443443+ let dir = Filename.concat log_dir
444444+ (Filename.concat "runs" (Filename.concat run_id "docs")) in
445445+ list_logs_in_dir dir
446446+```
447447+448448+**Step 7: Run tests to verify they pass**
449449+450450+Run: `dune exec tests/unit/test_run_data.exe`
451451+Expected: All 5 tests pass
452452+453453+**Step 8: Commit**
454454+455455+```bash
456456+git add web/data/ tests/unit/test_run_data.ml tests/unit/dune
457457+git commit -m "feat(web): add run data layer with tests"
458458+```
459459+460460+---
461461+462462+## Task 4: Data Layer - Package Data
463463+464464+**Files:**
465465+- Create: `/workspace/web/data/package_data.ml`
466466+- Create: `/workspace/web/data/package_data.mli`
467467+- Modify: `/workspace/web/data/dune`
468468+- Create: `/workspace/tests/unit/test_package_data.ml`
469469+- Modify: `/workspace/tests/unit/dune`
470470+471471+**Step 1: Write the failing test**
472472+473473+Create `/workspace/tests/unit/test_package_data.ml`:
474474+475475+```ocaml
476476+(** Unit tests for package data reading *)
477477+478478+let test_dir = ref ""
479479+480480+let setup () =
481481+ let dir = Filename.temp_dir "test-pkg-data-" "" in
482482+ test_dir := dir;
483483+ dir
484484+485485+let teardown () =
486486+ if !test_dir <> "" then begin
487487+ ignore (Sys.command (Printf.sprintf "rm -rf %s" !test_dir));
488488+ test_dir := ""
489489+ end
490490+491491+let mkdir_p path =
492492+ let rec create dir =
493493+ if not (Sys.file_exists dir) then begin
494494+ create (Filename.dirname dir);
495495+ try Unix.mkdir dir 0o755 with Unix.Unix_error (Unix.EEXIST, _, _) -> ()
496496+ end
497497+ in
498498+ create path
499499+500500+let write_file path content =
501501+ let dir = Filename.dirname path in
502502+ mkdir_p dir;
503503+ Out_channel.with_open_text path (fun oc -> Out_channel.output_string oc content)
504504+505505+(** Test: list_packages returns packages from html/p directory *)
506506+let test_list_packages () =
507507+ let base_dir = setup () in
508508+ let html_dir = Filename.concat base_dir "html" in
509509+ mkdir_p (Filename.concat html_dir "p/base/0.16.0");
510510+ mkdir_p (Filename.concat html_dir "p/base/0.15.0");
511511+ mkdir_p (Filename.concat html_dir "p/core/0.16.0");
512512+513513+ let packages = Day10_web_data.Package_data.list_packages ~html_dir in
514514+ assert (List.length packages = 3);
515515+ assert (List.mem ("base", "0.16.0") packages);
516516+ assert (List.mem ("base", "0.15.0") packages);
517517+ assert (List.mem ("core", "0.16.0") packages);
518518+519519+ teardown ();
520520+ Printf.printf "PASS: test_list_packages\n%!"
521521+522522+(** Test: list_package_versions returns versions for a package *)
523523+let test_list_package_versions () =
524524+ let base_dir = setup () in
525525+ let html_dir = Filename.concat base_dir "html" in
526526+ mkdir_p (Filename.concat html_dir "p/base/0.16.0");
527527+ mkdir_p (Filename.concat html_dir "p/base/0.15.0");
528528+ mkdir_p (Filename.concat html_dir "p/base/0.14.0");
529529+530530+ let versions = Day10_web_data.Package_data.list_package_versions ~html_dir ~name:"base" in
531531+ assert (List.length versions = 3);
532532+ (* Should be sorted descending *)
533533+ assert (List.hd versions = "0.16.0");
534534+535535+ teardown ();
536536+ Printf.printf "PASS: test_list_package_versions\n%!"
537537+538538+(** Test: package_has_docs checks if docs exist *)
539539+let test_package_has_docs () =
540540+ let base_dir = setup () in
541541+ let html_dir = Filename.concat base_dir "html" in
542542+ mkdir_p (Filename.concat html_dir "p/base/0.16.0");
543543+544544+ assert (Day10_web_data.Package_data.package_has_docs ~html_dir ~name:"base" ~version:"0.16.0");
545545+ assert (not (Day10_web_data.Package_data.package_has_docs ~html_dir ~name:"base" ~version:"0.15.0"));
546546+547547+ teardown ();
548548+ Printf.printf "PASS: test_package_has_docs\n%!"
549549+550550+(** Test: list_package_names returns unique package names *)
551551+let test_list_package_names () =
552552+ let base_dir = setup () in
553553+ let html_dir = Filename.concat base_dir "html" in
554554+ mkdir_p (Filename.concat html_dir "p/base/0.16.0");
555555+ mkdir_p (Filename.concat html_dir "p/base/0.15.0");
556556+ mkdir_p (Filename.concat html_dir "p/core/0.16.0");
557557+ mkdir_p (Filename.concat html_dir "p/async/0.16.0");
558558+559559+ let names = Day10_web_data.Package_data.list_package_names ~html_dir in
560560+ assert (List.length names = 3);
561561+ assert (List.mem "base" names);
562562+ assert (List.mem "core" names);
563563+ assert (List.mem "async" names);
564564+565565+ teardown ();
566566+ Printf.printf "PASS: test_list_package_names\n%!"
567567+568568+let () =
569569+ Printf.printf "Running Package_data tests...\n%!";
570570+ test_list_packages ();
571571+ test_list_package_versions ();
572572+ test_package_has_docs ();
573573+ test_list_package_names ();
574574+ Printf.printf "\nAll Package_data tests passed!\n%!"
575575+```
576576+577577+**Step 2: Add test to tests/unit/dune**
578578+579579+Add to `/workspace/tests/unit/dune`:
580580+581581+```dune
582582+(executable
583583+ (name test_package_data)
584584+ (libraries day10_web_data unix))
585585+```
586586+587587+**Step 3: Run test to verify it fails**
588588+589589+Run: `dune build tests/unit/test_package_data.exe 2>&1`
590590+Expected: FAIL with "Unbound module Day10_web_data.Package_data"
591591+592592+**Step 4: Update web/data/dune to include new module**
593593+594594+Update `/workspace/web/data/dune`:
595595+596596+```dune
597597+(library
598598+ (name day10_web_data)
599599+ (libraries unix yojson day10_lib)
600600+ (modules run_data package_data))
601601+```
602602+603603+**Step 5: Create package_data.mli interface**
604604+605605+Create `/workspace/web/data/package_data.mli`:
606606+607607+```ocaml
608608+(** Read package data from day10's html directory *)
609609+610610+(** List all (name, version) pairs with docs *)
611611+val list_packages : html_dir:string -> (string * string) list
612612+613613+(** List unique package names *)
614614+val list_package_names : html_dir:string -> string list
615615+616616+(** List all versions for a package name, sorted descending *)
617617+val list_package_versions : html_dir:string -> name:string -> string list
618618+619619+(** Check if docs exist for a package version *)
620620+val package_has_docs : html_dir:string -> name:string -> version:string -> bool
621621+622622+(** Get the docs URL path for a package *)
623623+val docs_path : name:string -> version:string -> string
624624+```
625625+626626+**Step 6: Implement package_data.ml**
627627+628628+Create `/workspace/web/data/package_data.ml`:
629629+630630+```ocaml
631631+(** Read package data from day10's html directory *)
632632+633633+let list_package_names ~html_dir =
634634+ let p_dir = Filename.concat html_dir "p" in
635635+ if Sys.file_exists p_dir && Sys.is_directory p_dir then
636636+ Sys.readdir p_dir
637637+ |> Array.to_list
638638+ |> List.filter (fun name ->
639639+ let path = Filename.concat p_dir name in
640640+ Sys.is_directory path)
641641+ |> List.sort String.compare
642642+ else
643643+ []
644644+645645+let compare_versions v1 v2 =
646646+ (* Simple version comparison - compare segments numerically where possible *)
647647+ let parse v =
648648+ String.split_on_char '.' v
649649+ |> List.map (fun s -> try `Int (int_of_string s) with _ -> `Str s)
650650+ in
651651+ let rec cmp l1 l2 = match l1, l2 with
652652+ | [], [] -> 0
653653+ | [], _ -> -1
654654+ | _, [] -> 1
655655+ | `Int a :: t1, `Int b :: t2 ->
656656+ let c = Int.compare a b in if c <> 0 then c else cmp t1 t2
657657+ | `Str a :: t1, `Str b :: t2 ->
658658+ let c = String.compare a b in if c <> 0 then c else cmp t1 t2
659659+ | `Int _ :: _, `Str _ :: _ -> -1
660660+ | `Str _ :: _, `Int _ :: _ -> 1
661661+ in
662662+ cmp (parse v2) (parse v1) (* Descending order *)
663663+664664+let list_package_versions ~html_dir ~name =
665665+ let pkg_dir = Filename.concat (Filename.concat html_dir "p") name in
666666+ if Sys.file_exists pkg_dir && Sys.is_directory pkg_dir then
667667+ Sys.readdir pkg_dir
668668+ |> Array.to_list
669669+ |> List.filter (fun version ->
670670+ let path = Filename.concat pkg_dir version in
671671+ Sys.is_directory path)
672672+ |> List.sort compare_versions
673673+ else
674674+ []
675675+676676+let list_packages ~html_dir =
677677+ list_package_names ~html_dir
678678+ |> List.concat_map (fun name ->
679679+ list_package_versions ~html_dir ~name
680680+ |> List.map (fun version -> (name, version)))
681681+682682+let package_has_docs ~html_dir ~name ~version =
683683+ let path = Filename.concat html_dir
684684+ (Filename.concat "p" (Filename.concat name version)) in
685685+ Sys.file_exists path && Sys.is_directory path
686686+687687+let docs_path ~name ~version =
688688+ Printf.sprintf "/docs/p/%s/%s/" name version
689689+```
690690+691691+**Step 7: Run tests to verify they pass**
692692+693693+Run: `dune exec tests/unit/test_package_data.exe`
694694+Expected: All 4 tests pass
695695+696696+**Step 8: Commit**
697697+698698+```bash
699699+git add web/data/ tests/unit/test_package_data.ml tests/unit/dune
700700+git commit -m "feat(web): add package data layer with tests"
701701+```
702702+703703+---
704704+705705+## Task 5: HTML Layout Module
706706+707707+**Files:**
708708+- Create: `/workspace/web/views/dune`
709709+- Create: `/workspace/web/views/layout.ml`
710710+- Modify: `/workspace/web/dune`
711711+712712+**Step 1: Create web/views/dune**
713713+714714+Create `/workspace/web/views/dune`:
715715+716716+```dune
717717+(library
718718+ (name day10_web_views)
719719+ (libraries dream day10_web_data))
720720+```
721721+722722+**Step 2: Create layout.ml with base HTML structure**
723723+724724+Create `/workspace/web/views/layout.ml`:
725725+726726+```ocaml
727727+(** Common HTML layout components *)
728728+729729+let head ~title =
730730+ Printf.sprintf {|<!DOCTYPE html>
731731+<html lang="en">
732732+<head>
733733+ <meta charset="UTF-8">
734734+ <meta name="viewport" content="width=device-width, initial-scale=1.0">
735735+ <title>%s - day10</title>
736736+ <style>
737737+ :root {
738738+ --bg: #1a1a2e;
739739+ --bg-card: #16213e;
740740+ --text: #eee;
741741+ --text-muted: #888;
742742+ --accent: #0f3460;
743743+ --success: #2ecc71;
744744+ --error: #e74c3c;
745745+ --warning: #f39c12;
746746+ --border: #333;
747747+ }
748748+ * { box-sizing: border-box; margin: 0; padding: 0; }
749749+ body {
750750+ font-family: -apple-system, BlinkMacSystemFont, 'Segoe UI', Roboto, sans-serif;
751751+ background: var(--bg);
752752+ color: var(--text);
753753+ line-height: 1.6;
754754+ }
755755+ .container { max-width: 1200px; margin: 0 auto; padding: 1rem; }
756756+ nav {
757757+ background: var(--bg-card);
758758+ border-bottom: 1px solid var(--border);
759759+ padding: 1rem;
760760+ }
761761+ nav a { color: var(--text); text-decoration: none; margin-right: 1.5rem; }
762762+ nav a:hover { text-decoration: underline; }
763763+ nav .brand { font-weight: bold; font-size: 1.2rem; }
764764+ h1, h2, h3 { margin-bottom: 1rem; }
765765+ .card {
766766+ background: var(--bg-card);
767767+ border-radius: 8px;
768768+ padding: 1.5rem;
769769+ margin-bottom: 1rem;
770770+ }
771771+ .grid { display: grid; grid-template-columns: repeat(auto-fit, minmax(200px, 1fr)); gap: 1rem; }
772772+ .stat { text-align: center; }
773773+ .stat-value { font-size: 2rem; font-weight: bold; }
774774+ .stat-label { color: var(--text-muted); font-size: 0.9rem; }
775775+ .badge {
776776+ display: inline-block;
777777+ padding: 0.25rem 0.5rem;
778778+ border-radius: 4px;
779779+ font-size: 0.85rem;
780780+ font-weight: 500;
781781+ }
782782+ .badge-success { background: var(--success); color: #fff; }
783783+ .badge-error { background: var(--error); color: #fff; }
784784+ .badge-warning { background: var(--warning); color: #000; }
785785+ table { width: 100%%; border-collapse: collapse; }
786786+ th, td { padding: 0.75rem; text-align: left; border-bottom: 1px solid var(--border); }
787787+ th { color: var(--text-muted); font-weight: 500; }
788788+ a { color: #5dade2; }
789789+ pre {
790790+ background: #0d1117;
791791+ padding: 1rem;
792792+ border-radius: 4px;
793793+ overflow-x: auto;
794794+ font-size: 0.9rem;
795795+ }
796796+ input[type="search"] {
797797+ width: 100%%;
798798+ padding: 0.75rem;
799799+ background: var(--accent);
800800+ border: 1px solid var(--border);
801801+ border-radius: 4px;
802802+ color: var(--text);
803803+ margin-bottom: 1rem;
804804+ }
805805+ input[type="search"]:focus { outline: 2px solid #5dade2; }
806806+ </style>
807807+</head>
808808+<body>
809809+|} title
810810+811811+let nav () = {|
812812+<nav>
813813+ <div class="container">
814814+ <a href="/" class="brand">day10</a>
815815+ <a href="/packages">Packages</a>
816816+ <a href="/runs">Runs</a>
817817+ </div>
818818+</nav>
819819+|}
820820+821821+let footer () = {|
822822+</body>
823823+</html>
824824+|}
825825+826826+let page ~title ~content =
827827+ head ~title ^ nav () ^
828828+ {|<main class="container">|} ^ content ^ {|</main>|} ^
829829+ footer ()
830830+831831+let badge status =
832832+ match status with
833833+ | `Success -> {|<span class="badge badge-success">success</span>|}
834834+ | `Failed -> {|<span class="badge badge-error">failed</span>|}
835835+ | `Skipped -> {|<span class="badge badge-warning">skipped</span>|}
836836+837837+let stat ~value ~label =
838838+ Printf.sprintf {|<div class="stat"><div class="stat-value">%s</div><div class="stat-label">%s</div></div>|} value label
839839+```
840840+841841+**Step 3: Update web/dune to include views**
842842+843843+Update `/workspace/web/dune`:
844844+845845+```dune
846846+(executable
847847+ (name main)
848848+ (public_name day10-web)
849849+ (package day10-web)
850850+ (libraries dream day10_lib day10_web_data day10_web_views cmdliner unix yojson))
851851+```
852852+853853+**Step 4: Build to verify it compiles**
854854+855855+Run: `dune build`
856856+Expected: Builds successfully
857857+858858+**Step 5: Commit**
859859+860860+```bash
861861+git add web/views/ web/dune
862862+git commit -m "feat(web): add HTML layout module"
863863+```
864864+865865+---
866866+867867+## Task 6: Dashboard Page
868868+869869+**Files:**
870870+- Create: `/workspace/web/views/dashboard.ml`
871871+- Modify: `/workspace/web/views/dune`
872872+- Modify: `/workspace/web/main.ml`
873873+874874+**Step 1: Create dashboard.ml**
875875+876876+Create `/workspace/web/views/dashboard.ml`:
877877+878878+```ocaml
879879+(** Dashboard page view *)
880880+881881+let render ~log_dir ~html_dir =
882882+ let latest_run_id = Day10_web_data.Run_data.get_latest_run_id ~log_dir in
883883+ let latest_summary = match latest_run_id with
884884+ | Some run_id -> Day10_web_data.Run_data.read_summary ~log_dir ~run_id
885885+ | None -> None
886886+ in
887887+ let packages = Day10_web_data.Package_data.list_packages ~html_dir in
888888+ let total_packages = List.length packages in
889889+890890+ let stats_content = match latest_summary with
891891+ | Some s ->
892892+ let build_rate = if s.targets_requested > 0
893893+ then float_of_int s.build_success /. float_of_int s.targets_requested *. 100.0
894894+ else 0.0 in
895895+ let doc_rate = if s.build_success > 0
896896+ then float_of_int s.doc_success /. float_of_int s.build_success *. 100.0
897897+ else 0.0 in
898898+ Printf.sprintf {|
899899+ <div class="grid">
900900+ %s
901901+ %s
902902+ %s
903903+ %s
904904+ </div>
905905+ |}
906906+ (Layout.stat ~value:(string_of_int total_packages) ~label:"Packages with Docs")
907907+ (Layout.stat ~value:(Printf.sprintf "%.0f%%" build_rate) ~label:"Build Success Rate")
908908+ (Layout.stat ~value:(Printf.sprintf "%.0f%%" doc_rate) ~label:"Doc Success Rate")
909909+ (Layout.stat ~value:(Printf.sprintf "%.0fs" s.duration_seconds) ~label:"Last Run Duration")
910910+ | None ->
911911+ Printf.sprintf {|
912912+ <div class="grid">
913913+ %s
914914+ %s
915915+ </div>
916916+ <p style="color: var(--text-muted); margin-top: 1rem;">No runs recorded yet.</p>
917917+ |}
918918+ (Layout.stat ~value:(string_of_int total_packages) ~label:"Packages with Docs")
919919+ (Layout.stat ~value:"—" ~label:"No Runs Yet")
920920+ in
921921+922922+ let latest_run_content = match latest_summary with
923923+ | Some s ->
924924+ Printf.sprintf {|
925925+ <h2>Latest Run</h2>
926926+ <div class="card">
927927+ <p><strong>Run ID:</strong> <a href="/runs/%s">%s</a></p>
928928+ <p><strong>Started:</strong> %s</p>
929929+ <p><strong>Duration:</strong> %.0f seconds</p>
930930+ <table>
931931+ <tr><th>Metric</th><th>Count</th></tr>
932932+ <tr><td>Targets Requested</td><td>%d</td></tr>
933933+ <tr><td>Solutions Found</td><td>%d</td></tr>
934934+ <tr><td>Build Success</td><td>%d %s</td></tr>
935935+ <tr><td>Build Failed</td><td>%d %s</td></tr>
936936+ <tr><td>Doc Success</td><td>%d %s</td></tr>
937937+ <tr><td>Doc Failed</td><td>%d %s</td></tr>
938938+ <tr><td>Doc Skipped</td><td>%d %s</td></tr>
939939+ </table>
940940+ %s
941941+ </div>
942942+ |}
943943+ s.run_id s.run_id
944944+ s.start_time
945945+ s.duration_seconds
946946+ s.targets_requested
947947+ s.solutions_found
948948+ s.build_success (if s.build_success > 0 then Layout.badge `Success else "")
949949+ s.build_failed (if s.build_failed > 0 then Layout.badge `Failed else "")
950950+ s.doc_success (if s.doc_success > 0 then Layout.badge `Success else "")
951951+ s.doc_failed (if s.doc_failed > 0 then Layout.badge `Failed else "")
952952+ s.doc_skipped (if s.doc_skipped > 0 then Layout.badge `Skipped else "")
953953+ (if List.length s.failures > 0 then
954954+ Printf.sprintf {|
955955+ <h3 style="margin-top: 1rem;">Failures (%d)</h3>
956956+ <table>
957957+ <tr><th>Package</th><th>Error</th></tr>
958958+ %s
959959+ </table>
960960+ |} (List.length s.failures)
961961+ (s.failures |> List.map (fun (pkg, err) ->
962962+ Printf.sprintf "<tr><td><a href=\"/packages/%s\">%s</a></td><td>%s</td></tr>"
963963+ (* Split only at the FIRST '.': versions contain dots, so "alcotest.1.9.0" must become "alcotest/1.9.0", not "alcotest/1/9/0" *)
963963+ (match String.index_opt pkg '.' with Some i -> String.sub pkg 0 i ^ "/" ^ String.sub pkg (i + 1) (String.length pkg - i - 1) | None -> pkg) pkg err
964964+ ) |> String.concat "\n")
965965+ else "")
966966+ | None -> ""
967967+ in
968968+969969+ let content = Printf.sprintf {|
970970+ <h1>Dashboard</h1>
971971+ <div class="card">
972972+ %s
973973+ </div>
974974+ %s
975975+ |} stats_content latest_run_content
976976+ in
977977+ Layout.page ~title:"Dashboard" ~content
978978+```
979979+980980+**Step 2: Update web/views/dune**
981981+982982+Update `/workspace/web/views/dune`:
983983+984984+```dune
985985+(library
986986+ (name day10_web_views)
987987+ (libraries dream day10_web_data)
988988+ (modules layout dashboard))
989989+```
990990+991991+**Step 3: Update main.ml to use dashboard**
992992+993993+Update the router in `/workspace/web/main.ml`:
994994+995995+```ocaml
996996+open Cmdliner
997997+998998+let cache_dir =
999999+ let doc = "Path to day10's cache directory" in
10001000+ Arg.(required & opt (some dir) None & info ["cache-dir"] ~docv:"DIR" ~doc)
10011001+10021002+let html_dir =
10031003+ let doc = "Path to generated documentation directory" in
10041004+ Arg.(required & opt (some dir) None & info ["html-dir"] ~docv:"DIR" ~doc)
10051005+10061006+let port =
10071007+ let doc = "HTTP port to listen on" in
10081008+ Arg.(value & opt int 8080 & info ["port"; "p"] ~docv:"PORT" ~doc)
10091009+10101010+let host =
10111011+ let doc = "Host address to bind to" in
10121012+ Arg.(value & opt string "127.0.0.1" & info ["host"] ~docv:"HOST" ~doc)
10131013+10141014+let platform =
10151015+ let doc = "Platform subdirectory in cache" in
10161016+ Arg.(value & opt string "debian-12-x86_64" & info ["platform"] ~docv:"PLATFORM" ~doc)
10171017+10181018+type config = {
10191019+ cache_dir : string;
10201020+ html_dir : string;
10211021+ port : int;
10221022+ host : string;
10231023+ platform : string;
10241024+}
10251025+10261026+let log_dir config = Filename.concat config.cache_dir "logs"
10271027+10281028+let run_server config =
10291029+ Dream.run ~port:config.port ~interface:config.host
10301030+ @@ Dream.logger
10311031+ @@ Dream.router [
10321032+ Dream.get "/" (fun _ ->
10331033+ let html = Day10_web_views.Dashboard.render
10341034+ ~log_dir:(log_dir config)
10351035+ ~html_dir:config.html_dir in
10361036+ Dream.html html);
10371037+ ]
10381038+10391039+let main cache_dir html_dir port host platform =
10401040+ let config = { cache_dir; html_dir; port; host; platform } in
10411041+ run_server config
10421042+10431043+let cmd =
10441044+ let doc = "Web dashboard for day10 documentation status" in
10451045+ let info = Cmd.info "day10-web" ~version:"0.1.0" ~doc in
10461046+ Cmd.v info Term.(const main $ cache_dir $ html_dir $ port $ host $ platform)
10471047+10481048+let () = exit (Cmd.eval cmd)
10491049+```
10501050+10511051+**Step 4: Build and verify**
10521052+10531053+Run: `dune build`
10541054+Expected: Builds successfully
10551055+10561056+**Step 5: Commit**
10571057+10581058+```bash
10591059+git add web/views/ web/main.ml
10601060+git commit -m "feat(web): add dashboard page"
10611061+```
10621062+10631063+---
10641064+10651065+## Task 7: Runs Pages
10661066+10671067+**Files:**
10681068+- Create: `/workspace/web/views/runs.ml`
10691069+- Modify: `/workspace/web/views/dune`
10701070+- Modify: `/workspace/web/main.ml`
10711071+10721072+**Step 1: Create runs.ml**
10731073+10741074+Create `/workspace/web/views/runs.ml`:
10751075+10761076+```ocaml
10771077+(** Run history and detail pages *)
10781078+10791079+let list_page ~log_dir =
10801080+ let runs = Day10_web_data.Run_data.list_runs ~log_dir in
10811081+ let rows = runs |> List.map (fun run_id ->
10821082+ let summary = Day10_web_data.Run_data.read_summary ~log_dir ~run_id in
10831083+ match summary with
10841084+ | Some s ->
10851085+ Printf.sprintf {|
10861086+ <tr>
10871087+ <td><a href="/runs/%s">%s</a></td>
10881088+ <td>%s</td>
10891089+ <td>%.0fs</td>
10901090+ <td>%d %s</td>
10911091+ <td>%d %s</td>
10921092+ <td>%d %s</td>
10931093+ </tr>
10941094+ |} run_id run_id
10951095+ s.start_time
10961096+ s.duration_seconds
10971097+ s.build_success (if s.build_success > 0 then Layout.badge `Success else "")
10981098+ s.build_failed (if s.build_failed > 0 then Layout.badge `Failed else "")
10991099+ s.doc_success (if s.doc_success > 0 then Layout.badge `Success else "")
11001100+ | None ->
11011101+ Printf.sprintf {|<tr><td><a href="/runs/%s">%s</a></td><td colspan="5">Summary not available</td></tr>|} run_id run_id
11021102+ ) |> String.concat "\n" in
11031103+11041104+ let content = if List.length runs = 0 then
11051105+ {|<h1>Run History</h1><p class="card">No runs recorded yet.</p>|}
11061106+ else
11071107+ Printf.sprintf {|
11081108+ <h1>Run History</h1>
11091109+ <div class="card">
11101110+ <table>
11111111+ <tr>
11121112+ <th>Run ID</th>
11131113+ <th>Started</th>
11141114+ <th>Duration</th>
11151115+ <th>Builds</th>
11161116+ <th>Failed</th>
11171117+ <th>Docs</th>
11181118+ </tr>
11191119+ %s
11201120+ </table>
11211121+ </div>
11221122+ |} rows
11231123+ in
11241124+ Layout.page ~title:"Run History" ~content
11251125+11261126+let detail_page ~log_dir ~run_id =
11271127+ match Day10_web_data.Run_data.read_summary ~log_dir ~run_id with
11281128+ | None ->
11291129+ Layout.page ~title:"Run Not Found" ~content:{|
11301130+ <h1>Run Not Found</h1>
11311131+ <p class="card">The requested run could not be found.</p>
11321132+ <p><a href="/runs">← Back to run history</a></p>
11331133+ |}
11341134+ | Some s ->
11351135+ let failures_table = if List.length s.failures > 0 then
11361136+ Printf.sprintf {|
11371137+ <h2>Failures (%d)</h2>
11381138+ <div class="card">
11391139+ <table>
11401140+ <tr><th>Package</th><th>Error</th><th>Logs</th></tr>
11411141+ %s
11421142+ </table>
11431143+ </div>
11441144+ |} (List.length s.failures)
11451145+ (s.failures |> List.map (fun (pkg, err) ->
11461146+ Printf.sprintf {|<tr>
11471147+ <td>%s</td>
11481148+ <td>%s</td>
11491149+ <td>
11501150+ <a href="/runs/%s/build/%s">build</a> |
11511151+ <a href="/runs/%s/docs/%s">docs</a>
11521152+ </td>
11531153+ </tr>|} pkg err run_id pkg run_id pkg
11541154+ ) |> String.concat "\n")
11551155+ else ""
11561156+ in
11571157+11581158+ let build_logs = Day10_web_data.Run_data.list_build_logs ~log_dir ~run_id in
11591159+ let logs_section = if List.length build_logs > 0 then
11601160+ Printf.sprintf {|
11611161+ <h2>Build Logs (%d)</h2>
11621162+ <div class="card">
11631163+ <ul>%s</ul>
11641164+ </div>
11651165+ |} (List.length build_logs)
11661166+ (build_logs |> List.map (fun pkg ->
11671167+ Printf.sprintf {|<li><a href="/runs/%s/build/%s">%s</a></li>|} run_id pkg pkg
11681168+ ) |> String.concat "\n")
11691169+ else ""
11701170+ in
11711171+11721172+ let content = Printf.sprintf {|
11731173+ <h1>Run %s</h1>
11741174+ <p><a href="/runs">← Back to run history</a></p>
11751175+11761176+ <div class="card">
11771177+ <h2>Summary</h2>
11781178+ <table>
11791179+ <tr><td>Started</td><td>%s</td></tr>
11801180+ <tr><td>Ended</td><td>%s</td></tr>
11811181+ <tr><td>Duration</td><td>%.0f seconds</td></tr>
11821182+ </table>
11831183+ </div>
11841184+11851185+ <div class="card">
11861186+ <h2>Results</h2>
11871187+ <div class="grid">
11881188+ %s %s %s %s %s %s %s
11891189+ </div>
11901190+ </div>
11911191+11921192+ %s
11931193+ %s
11941194+ |}
11951195+ run_id
11961196+ s.start_time s.end_time s.duration_seconds
11971197+ (Layout.stat ~value:(string_of_int s.targets_requested) ~label:"Targets")
11981198+ (Layout.stat ~value:(string_of_int s.solutions_found) ~label:"Solved")
11991199+ (Layout.stat ~value:(string_of_int s.build_success) ~label:"Build OK")
12001200+ (Layout.stat ~value:(string_of_int s.build_failed) ~label:"Build Failed")
12011201+ (Layout.stat ~value:(string_of_int s.doc_success) ~label:"Docs OK")
12021202+ (Layout.stat ~value:(string_of_int s.doc_failed) ~label:"Docs Failed")
12031203+ (Layout.stat ~value:(string_of_int s.doc_skipped) ~label:"Docs Skipped")
12041204+ failures_table
12051205+ logs_section
12061206+ in
12071207+ Layout.page ~title:(Printf.sprintf "Run %s" run_id) ~content
12081208+12091209+let log_page ~log_dir ~run_id ~log_type ~package =
12101210+ let content_opt = match log_type with
12111211+ | `Build -> Day10_web_data.Run_data.read_build_log ~log_dir ~run_id ~package
12121212+ | `Docs -> Day10_web_data.Run_data.read_doc_log ~log_dir ~run_id ~package
12131213+ in
12141214+ let type_str = match log_type with `Build -> "Build" | `Docs -> "Doc" in
12151215+ match content_opt with
12161216+ | None ->
12171217+ Layout.page ~title:"Log Not Found" ~content:(Printf.sprintf {|
12181218+ <h1>Log Not Found</h1>
12191219+ <p class="card">The requested log could not be found. It may have been garbage collected.</p>
12201220+ <p><a href="/runs/%s">← Back to run %s</a></p>
12211221+ |} run_id run_id)
12221222+ | Some content ->
12231223+ let escaped = content
12241224+ |> String.split_on_char '&' |> String.concat "&amp;"
12251225+ |> String.split_on_char '<' |> String.concat "&lt;"
12261226+ |> String.split_on_char '>' |> String.concat "&gt;"
12271227+ in
12281228+ Layout.page ~title:(Printf.sprintf "%s Log: %s" type_str package) ~content:(Printf.sprintf {|
12291229+ <h1>%s Log: %s</h1>
12301230+ <p><a href="/runs/%s">← Back to run %s</a></p>
12311231+ <div class="card">
12321232+ <pre>%s</pre>
12331233+ </div>
12341234+ |} type_str package run_id run_id escaped)
12351235+```
12361236+12371237+**Step 2: Update web/views/dune**
12381238+12391239+Update `/workspace/web/views/dune`:
12401240+12411241+```dune
12421242+(library
12431243+ (name day10_web_views)
12441244+ (libraries dream day10_web_data)
12451245+ (modules layout dashboard runs))
12461246+```
12471247+12481248+**Step 3: Update main.ml with run routes**
12491249+12501250+Add routes to `/workspace/web/main.ml`:
12511251+12521252+```ocaml
12531253+open Cmdliner
12541254+12551255+let cache_dir =
12561256+ let doc = "Path to day10's cache directory" in
12571257+ Arg.(required & opt (some dir) None & info ["cache-dir"] ~docv:"DIR" ~doc)
12581258+12591259+let html_dir =
12601260+ let doc = "Path to generated documentation directory" in
12611261+ Arg.(required & opt (some dir) None & info ["html-dir"] ~docv:"DIR" ~doc)
12621262+12631263+let port =
12641264+ let doc = "HTTP port to listen on" in
12651265+ Arg.(value & opt int 8080 & info ["port"; "p"] ~docv:"PORT" ~doc)
12661266+12671267+let host =
12681268+ let doc = "Host address to bind to" in
12691269+ Arg.(value & opt string "127.0.0.1" & info ["host"] ~docv:"HOST" ~doc)
12701270+12711271+let platform =
12721272+ let doc = "Platform subdirectory in cache" in
12731273+ Arg.(value & opt string "debian-12-x86_64" & info ["platform"] ~docv:"PLATFORM" ~doc)
12741274+12751275+type config = {
12761276+ cache_dir : string;
12771277+ html_dir : string;
12781278+ port : int;
12791279+ host : string;
12801280+ platform : string;
12811281+}
12821282+12831283+let log_dir config = Filename.concat config.cache_dir "logs"
12841284+12851285+let run_server config =
12861286+ Dream.run ~port:config.port ~interface:config.host
12871287+ @@ Dream.logger
12881288+ @@ Dream.router [
12891289+ Dream.get "/" (fun _ ->
12901290+ let html = Day10_web_views.Dashboard.render
12911291+ ~log_dir:(log_dir config)
12921292+ ~html_dir:config.html_dir in
12931293+ Dream.html html);
12941294+12951295+ Dream.get "/runs" (fun _ ->
12961296+ let html = Day10_web_views.Runs.list_page ~log_dir:(log_dir config) in
12971297+ Dream.html html);
12981298+12991299+ Dream.get "/runs/:run_id" (fun request ->
13001300+ let run_id = Dream.param request "run_id" in
13011301+ let html = Day10_web_views.Runs.detail_page ~log_dir:(log_dir config) ~run_id in
13021302+ Dream.html html);
13031303+13041304+ Dream.get "/runs/:run_id/build/:package" (fun request ->
13051305+ let run_id = Dream.param request "run_id" in
13061306+ let package = Dream.param request "package" in
13071307+ let html = Day10_web_views.Runs.log_page
13081308+ ~log_dir:(log_dir config) ~run_id ~log_type:`Build ~package in
13091309+ Dream.html html);
13101310+13111311+ Dream.get "/runs/:run_id/docs/:package" (fun request ->
13121312+ let run_id = Dream.param request "run_id" in
13131313+ let package = Dream.param request "package" in
13141314+ let html = Day10_web_views.Runs.log_page
13151315+ ~log_dir:(log_dir config) ~run_id ~log_type:`Docs ~package in
13161316+ Dream.html html);
13171317+ ]
13181318+13191319+let main cache_dir html_dir port host platform =
13201320+ let config = { cache_dir; html_dir; port; host; platform } in
13211321+ run_server config
13221322+13231323+let cmd =
13241324+ let doc = "Web dashboard for day10 documentation status" in
13251325+ let info = Cmd.info "day10-web" ~version:"0.1.0" ~doc in
13261326+ Cmd.v info Term.(const main $ cache_dir $ html_dir $ port $ host $ platform)
13271327+13281328+let () = exit (Cmd.eval cmd)
13291329+```
13301330+13311331+**Step 4: Build and verify**
13321332+13331333+Run: `dune build`
13341334+Expected: Builds successfully
13351335+13361336+**Step 5: Commit**
13371337+13381338+```bash
13391339+git add web/views/ web/main.ml
13401340+git commit -m "feat(web): add run history and detail pages"
13411341+```
13421342+13431343+---
13441344+13451345+## Task 8: Packages Pages
13461346+13471347+**Files:**
13481348+- Create: `/workspace/web/views/packages.ml`
13491349+- Modify: `/workspace/web/views/dune`
13501350+- Modify: `/workspace/web/main.ml`
13511351+13521352+**Step 1: Create packages.ml**
13531353+13541354+Create `/workspace/web/views/packages.ml`:
13551355+13561356+```ocaml
13571357+(** Package list and detail pages *)
13581358+13591359+let list_page ~html_dir =
13601360+ let packages = Day10_web_data.Package_data.list_packages ~html_dir in
13611361+ let rows = packages |> List.map (fun (name, version) ->
13621362+ Printf.sprintf {|
13631363+ <tr>
13641364+ <td><a href="/packages/%s/%s">%s</a></td>
13651365+ <td>%s</td>
13661366+ <td>%s</td>
13671367+ <td><a href="/docs/p/%s/%s/">View Docs</a></td>
13681368+ </tr>
13691369+ |} name version name version (Layout.badge `Success) name version
13701370+ ) |> String.concat "\n" in
13711371+13721372+ let content = Printf.sprintf {|
13731373+ <h1>Packages</h1>
13741374+ <div class="card">
13751375+ <input type="search" id="pkg-search" placeholder="Search packages..." onkeyup="filterTable()">
13761376+ <table id="pkg-table">
13771377+ <thead>
13781378+ <tr>
13791379+ <th>Package</th>
13801380+ <th>Version</th>
13811381+ <th>Docs Status</th>
13821382+ <th>Links</th>
13831383+ </tr>
13841384+ </thead>
13851385+ <tbody>
13861386+ %s
13871387+ </tbody>
13881388+ </table>
13891389+ </div>
13901390+ <script>
13911391+ function filterTable() {
13921392+ const filter = document.getElementById('pkg-search').value.toLowerCase();
13931393+ const rows = document.querySelectorAll('#pkg-table tbody tr');
13941394+ rows.forEach(row => {
13951395+ const text = row.textContent.toLowerCase();
13961396+ row.style.display = text.includes(filter) ? '' : 'none';
13971397+ });
13981398+ }
13991399+ </script>
14001400+ |} rows
14011401+ in
14021402+ Layout.page ~title:"Packages" ~content
14031403+14041404+let detail_page ~html_dir ~name ~version =
14051405+ if not (Day10_web_data.Package_data.package_has_docs ~html_dir ~name ~version) then
14061406+ Layout.page ~title:"Package Not Found" ~content:(Printf.sprintf {|
14071407+ <h1>Package Not Found</h1>
14081408+ <p class="card">No documentation found for %s.%s</p>
14091409+ <p><a href="/packages">← Back to packages</a></p>
14101410+ |} name version)
14111411+ else
14121412+ let all_versions = Day10_web_data.Package_data.list_package_versions ~html_dir ~name in
14131413+ let versions_list = all_versions |> List.map (fun v ->
14141414+ if v = version then
14151415+ Printf.sprintf "<li><strong>%s</strong> (current)</li>" v
14161416+ else
14171417+ Printf.sprintf {|<li><a href="/packages/%s/%s">%s</a></li>|} name v v
14181418+ ) |> String.concat "\n" in
14191419+14201420+ let content = Printf.sprintf {|
14211421+ <h1>%s.%s</h1>
14221422+ <p><a href="/packages">← Back to packages</a></p>
14231423+14241424+ <div class="card">
14251425+ <h2>Documentation</h2>
14261426+ <p>%s</p>
14271427+ <p><a href="/docs/p/%s/%s/">View Documentation →</a></p>
14281428+ </div>
14291429+14301430+ <div class="card">
14311431+ <h2>Other Versions</h2>
14321432+ <ul>%s</ul>
14331433+ </div>
14341434+ |} name version (Layout.badge `Success) name version versions_list
14351435+ in
14361436+ Layout.page ~title:(Printf.sprintf "%s.%s" name version) ~content
14371437+```
14381438+14391439+**Step 2: Update web/views/dune**
14401440+14411441+Update `/workspace/web/views/dune`:
14421442+14431443+```dune
14441444+(library
14451445+ (name day10_web_views)
14461446+ (libraries dream day10_web_data)
14471447+ (modules layout dashboard runs packages))
14481448+```
14491449+14501450+**Step 3: Update main.ml with package routes**
14511451+14521452+Add routes to `/workspace/web/main.ml` (full file):
14531453+14541454+```ocaml
14551455+open Cmdliner
14561456+14571457+let cache_dir =
14581458+ let doc = "Path to day10's cache directory" in
14591459+ Arg.(required & opt (some dir) None & info ["cache-dir"] ~docv:"DIR" ~doc)
14601460+14611461+let html_dir =
14621462+ let doc = "Path to generated documentation directory" in
14631463+ Arg.(required & opt (some dir) None & info ["html-dir"] ~docv:"DIR" ~doc)
14641464+14651465+let port =
14661466+ let doc = "HTTP port to listen on" in
14671467+ Arg.(value & opt int 8080 & info ["port"; "p"] ~docv:"PORT" ~doc)
14681468+14691469+let host =
14701470+ let doc = "Host address to bind to" in
14711471+ Arg.(value & opt string "127.0.0.1" & info ["host"] ~docv:"HOST" ~doc)
14721472+14731473+let platform =
14741474+ let doc = "Platform subdirectory in cache" in
14751475+ Arg.(value & opt string "debian-12-x86_64" & info ["platform"] ~docv:"PLATFORM" ~doc)
14761476+14771477+type config = {
14781478+ cache_dir : string;
14791479+ html_dir : string;
14801480+ port : int;
14811481+ host : string;
14821482+ platform : string;
14831483+}
14841484+14851485+let log_dir config = Filename.concat config.cache_dir "logs"
14861486+14871487+let run_server config =
14881488+ Dream.run ~port:config.port ~interface:config.host
14891489+ @@ Dream.logger
14901490+ @@ Dream.router [
14911491+ Dream.get "/" (fun _ ->
14921492+ let html = Day10_web_views.Dashboard.render
14931493+ ~log_dir:(log_dir config)
14941494+ ~html_dir:config.html_dir in
14951495+ Dream.html html);
14961496+14971497+ Dream.get "/packages" (fun _ ->
14981498+ let html = Day10_web_views.Packages.list_page ~html_dir:config.html_dir in
14991499+ Dream.html html);
15001500+15011501+ Dream.get "/packages/:name/:version" (fun request ->
15021502+ let name = Dream.param request "name" in
15031503+ let version = Dream.param request "version" in
15041504+ let html = Day10_web_views.Packages.detail_page
15051505+ ~html_dir:config.html_dir ~name ~version in
15061506+ Dream.html html);
15071507+15081508+ Dream.get "/runs" (fun _ ->
15091509+ let html = Day10_web_views.Runs.list_page ~log_dir:(log_dir config) in
15101510+ Dream.html html);
15111511+15121512+ Dream.get "/runs/:run_id" (fun request ->
15131513+ let run_id = Dream.param request "run_id" in
15141514+ let html = Day10_web_views.Runs.detail_page ~log_dir:(log_dir config) ~run_id in
15151515+ Dream.html html);
15161516+15171517+ Dream.get "/runs/:run_id/build/:package" (fun request ->
15181518+ let run_id = Dream.param request "run_id" in
15191519+ let package = Dream.param request "package" in
15201520+ let html = Day10_web_views.Runs.log_page
15211521+ ~log_dir:(log_dir config) ~run_id ~log_type:`Build ~package in
15221522+ Dream.html html);
15231523+15241524+ Dream.get "/runs/:run_id/docs/:package" (fun request ->
15251525+ let run_id = Dream.param request "run_id" in
15261526+ let package = Dream.param request "package" in
15271527+ let html = Day10_web_views.Runs.log_page
15281528+ ~log_dir:(log_dir config) ~run_id ~log_type:`Docs ~package in
15291529+ Dream.html html);
15301530+ ]
15311531+15321532+let main cache_dir html_dir port host platform =
15331533+ let config = { cache_dir; html_dir; port; host; platform } in
15341534+ run_server config
15351535+15361536+let cmd =
15371537+ let doc = "Web dashboard for day10 documentation status" in
15381538+ let info = Cmd.info "day10-web" ~version:"0.1.0" ~doc in
15391539+ Cmd.v info Term.(const main $ cache_dir $ html_dir $ port $ host $ platform)
15401540+15411541+let () = exit (Cmd.eval cmd)
15421542+```
15431543+15441544+**Step 4: Build and verify**
15451545+15461546+Run: `dune build`
15471547+Expected: Builds successfully
15481548+15491549+**Step 5: Commit**
15501550+15511551+```bash
15521552+git add web/views/ web/main.ml
15531553+git commit -m "feat(web): add packages list and detail pages"
15541554+```
15551555+15561556+---
15571557+15581558+## Task 9: Update Admin Guide
15591559+15601560+**Files:**
15611561+- Modify: `/workspace/docs/ADMIN_GUIDE.md`
15621562+15631563+**Step 1: Add day10-web section to admin guide**
15641564+15651565+Add a new section after "Serving Documentation" in `/workspace/docs/ADMIN_GUIDE.md`:
15661566+15671567+```markdown
15681568+### Status Dashboard (day10-web)
15691569+15701570+day10-web provides a web interface for monitoring package build status:
15711571+15721572+```bash
15731573+# Install day10-web
15741574+opam install day10-web
15751575+15761576+# Run the dashboard
15771577+day10-web --cache-dir /data/cache --html-dir /data/html --port 8080
15781578+```
15791579+15801580+#### Systemd Service for day10-web
15811581+15821582+Create `/etc/systemd/system/day10-web.service`:
15831583+15841584+```ini
15851585+[Unit]
15861586+Description=day10 status dashboard
15871587+After=network.target
15881588+15891589+[Service]
15901590+Type=simple
15911591+User=www-data
15921592+ExecStart=/usr/local/bin/day10-web \
15931593+ --cache-dir /data/cache \
15941594+ --html-dir /data/html \
15951595+ --host 0.0.0.0 \
15961596+ --port 8080
15971597+Restart=always
15981598+15991599+[Install]
16001600+WantedBy=multi-user.target
16011601+```
16021602+16031603+Enable and start:
16041604+16051605+```bash
16061606+sudo systemctl enable day10-web
16071607+sudo systemctl start day10-web
16081608+```
16091609+16101610+#### Combined nginx Configuration
16111611+16121612+Serve both the dashboard and documentation:
16131613+16141614+```nginx
16151615+server {
16161616+ listen 80;
16171617+ server_name docs.example.com;
16181618+16191619+ # Status dashboard
16201620+ location / {
16211621+ proxy_pass http://127.0.0.1:8080;
16221622+ proxy_set_header Host $host;
16231623+ proxy_set_header X-Real-IP $remote_addr;
16241624+ }
16251625+16261626+ # Generated documentation
16271627+ location /docs/ {
16281628+ alias /data/html/;
16291629+ autoindex on;
16301630+ try_files $uri $uri/ =404;
16311631+ }
16321632+}
16331633+```
16341634+16351635+#### Dashboard Features
16361636+16371637+- **Dashboard** (`/`): Overview with build/doc success rates, latest run summary
16381638+- **Packages** (`/packages`): Searchable list of all packages with docs
16391639+- **Package Detail** (`/packages/{name}/{version}`): Version list and doc links
16401640+- **Runs** (`/runs`): History of all batch runs
16411641+- **Run Detail** (`/runs/{id}`): Statistics, failures, and log links
16421642+- **Logs** (`/runs/{id}/build/{pkg}`, `/runs/{id}/docs/{pkg}`): View build and doc logs
16431643+```
16441644+16451645+**Step 2: Commit**
16461646+16471647+```bash
16481648+git add docs/ADMIN_GUIDE.md
16491649+git commit -m "docs: add day10-web to admin guide"
16501650+```
16511651+16521652+---
16531653+16541654+## Summary
16551655+16561656+| Task | Description | Tests |
16571657+|------|-------------|-------|
16581658+| 1 | Project setup (dune-project, web/dune, minimal main.ml) | Build check |
16591659+| 2 | CLI with cmdliner | Help output |
16601660+| 3 | Data layer: run_data | 5 unit tests |
16611661+| 4 | Data layer: package_data | 4 unit tests |
16621662+| 5 | HTML layout module | Build check |
16631663+| 6 | Dashboard page | Manual verification |
16641664+| 7 | Runs pages (list, detail, logs) | Manual verification |
16651665+| 8 | Packages pages (list, detail) | Manual verification |
16661666+| 9 | Update admin guide | Documentation |
16671667+16681668+**Total new tests:** 9 unit tests
16691669+**Total commits:** 9 commits
+43
day10/dune-project
···11+(lang dune 3.17)
22+33+(name day10)
44+55+(generate_opam_files true)
66+77+(source
88+ (github username/reponame))
99+1010+(authors "Author Name <author@example.com>")
1111+1212+(maintainers "Maintainer Name <maintainer@example.com>")
1313+1414+(license LICENSE)
1515+1616+(documentation https://url/to/documentation)
1717+1818+(package
1919+ (name day10)
2020+ (synopsis "A short synopsis")
2121+ (description "A longer description")
2222+ (depends
2323+ (ocaml (>= 5.3.0))
2424+ dune
2525+ ppx_deriving_yojson
2626+ opam-0install
2727+ (cmdliner (< 2.0.0))
2828+ dockerfile)
2929+ (tags
3030+ ("add topics" "to describe" your project)))
3131+3232+(package
3333+ (name day10-web)
3434+ (synopsis "Web dashboard for day10 documentation status")
3535+ (description "Status dashboard for package maintainers and operators")
3636+ (depends
3737+ (ocaml (>= 5.3.0))
3838+ dune
3939+ dream
4040+ day10
4141+ cmdliner))
4242+4343+; See the complete stanza docs at https://dune.readthedocs.io/en/stable/reference/dune-project/index.html
+146
day10/lib/atomic_swap.ml
···11+(** Atomic directory swap for graceful degradation.
22+33+ This module provides atomic swap operations for documentation directories,
44+ implementing the "fresh docs with graceful degradation" pattern:
55+ - Write new docs to a staging directory ({dir}.new)
66+ - On success, atomically swap: old -> .old, new -> current, remove .old
77+ - On failure, leave original docs intact
88+99+ This is a standalone module that can be used for testing without
1010+ the full day10 dependency chain. *)
1111+1212+let log fmt = Printf.ksprintf (fun _ -> ()) fmt
(* Recursively delete [path]. Missing paths are ignored; on EACCES we
   fall back to a shell `rm -rf` (quoted) which may succeed where the
   direct syscalls cannot. *)
let rec rm_rf path =
  try
    match (Unix.lstat path).Unix.st_kind with
    | Unix.S_DIR ->
        (* Empty the directory first, then remove it. *)
        Array.iter
          (fun entry -> rm_rf (Filename.concat path entry))
          (Sys.readdir path);
        Unix.rmdir path
    | _ ->
        (* Regular files, symlinks, sockets, ...: plain unlink. *)
        Unix.unlink path
  with
  | Unix.Unix_error (Unix.ENOENT, _, _) -> ()
  | Unix.Unix_error (Unix.EACCES, _, _) ->
      (* Try with shell rm for permission issues *)
      ignore (Sys.command (Printf.sprintf "rm -rf %s" (Filename.quote path)))
(** Clean up stale .new and .old directories from interrupted swaps.
    Call this on startup before processing packages.

    Covers both layouts:
    - blessed docs:  html/p/{pkg}/{version}[.new|.old]
    - universe docs: html/u/{universe}/{pkg}/{version}[.new|.old]

    All filesystem errors are swallowed: cleanup is best-effort and must
    never abort startup. *)
let cleanup_stale_dirs ~html_dir =
  (* Remove {version}.new (incomplete write) and {version}.old (incomplete
     swap) entries found directly inside [pkg_dir]. *)
  let purge_versions pkg_dir =
    try
      Sys.readdir pkg_dir
      |> Array.iter (fun entry ->
             let stale = Filename.concat pkg_dir entry in
             if Filename.check_suffix entry ".new" then begin
               log "Cleaning up stale .new directory: %s" stale;
               rm_rf stale
             end
             else if Filename.check_suffix entry ".old" then begin
               log "Cleaning up stale .old directory: %s" stale;
               rm_rf stale
             end)
    with _ -> ()
  in
  (* Apply [f] to every subdirectory of [dir]; ignore a missing/unreadable
     [dir] entirely. *)
  let for_each_subdir dir f =
    if Sys.file_exists dir && Sys.is_directory dir then
      try
        Sys.readdir dir
        |> Array.iter (fun name ->
               let sub = Filename.concat dir name in
               if Sys.is_directory sub then f sub)
      with _ -> ()
  in
  (* Blessed packages: one level of package dirs under html/p. *)
  for_each_subdir (Filename.concat html_dir "p") purge_versions;
  (* Universe packages: universe hash dirs, each holding package dirs. *)
  for_each_subdir (Filename.concat html_dir "u") (fun universe_dir ->
      for_each_subdir universe_dir purge_versions)
(** Get paths for atomic swap operations.
    Returns [(staging_dir, final_dir, old_dir)] where:
    - staging_dir: {version}.new — where new docs are written
    - final_dir:   {version}     — the live docs location
    - old_dir:     {version}.old — backup during swap *)
let get_swap_paths ~html_dir ~pkg ~version ~blessed ~universe =
  let ( / ) = Filename.concat in
  (* Blessed docs live under p/{pkg}; others under u/{universe}/{pkg}. *)
  let base_dir =
    if blessed then html_dir / "p" / pkg
    else html_dir / "u" / universe / pkg
  in
  let final_dir = base_dir / version in
  (final_dir ^ ".new", final_dir, final_dir ^ ".old")
(** Commit staging to final location atomically.
    Performs the swap: final -> .old, staging -> final, remove .old.
    If the promotion of the staging dir fails after the live docs were
    already moved aside, the backup is renamed back so readers never end
    up with no docs at all.
    Returns [true] on success, [false] on failure. *)
let commit ~html_dir ~pkg ~version ~blessed ~universe =
  let staging_dir, final_dir, old_dir =
    get_swap_paths ~html_dir ~pkg ~version ~blessed ~universe
  in
  if not (Sys.file_exists staging_dir) then begin
    log "commit: staging directory does not exist: %s" staging_dir;
    false
  end
  else begin
    log "commit: swapping %s -> %s" staging_dir final_dir;
    let has_existing = Sys.file_exists final_dir in
    try
      (* Step 1: back up the live docs, clearing any stale backup first. *)
      if has_existing then begin
        if Sys.file_exists old_dir then rm_rf old_dir;
        Unix.rename final_dir old_dir
      end;
      (* Step 2: promote staging to live. *)
      Unix.rename staging_dir final_dir;
      (* Step 3: drop the backup. *)
      if has_existing && Sys.file_exists old_dir then rm_rf old_dir;
      log "commit: successfully swapped docs for %s/%s" pkg version;
      true
    with e ->
      (* If step 2 failed after step 1 succeeded, the live docs are sitting
         in .old with nothing at the final path — restore them. *)
      if has_existing
         && (not (Sys.file_exists final_dir))
         && Sys.file_exists old_dir
      then (try Unix.rename old_dir final_dir with _ -> ());
      (match e with
       | Unix.Unix_error (err, _, _) ->
           log "commit: failed: %s" (Unix.error_message err)
       | _ -> ());
      false
  end
(** Rollback staging on failure.
    Removes the .new directory, leaving original docs intact. *)
let rollback ~html_dir ~pkg ~version ~blessed ~universe =
  let staging_dir, _final_dir, _old_dir =
    get_swap_paths ~html_dir ~pkg ~version ~blessed ~universe
  in
  match Sys.file_exists staging_dir with
  | false -> ()
  | true ->
      log "rollback: removing staging directory %s" staging_dir;
      rm_rf staging_dir
+154
day10/lib/build_lock.ml
···11+(** Lock files for tracking in-progress builds, doc generation, and tool builds.
22+33+ Lock files are automatically released when the process dies,
44+ providing reliable "in progress" detection without stale state.
55+66+ Lock file naming:
77+ locks/build-{package}.{version}-{universe}.lock
88+ locks/doc-{package}.{version}-{universe}.lock
99+ locks/tool-{name}.lock or locks/tool-{name}-{ocaml_version}.lock
1010+1111+ This module provides query functions for the web UI. The actual lock
1212+ acquisition is done by Os.create_directory_exclusively in bin/os.ml. *)
1313+1414+type stage = Build | Doc | Tool
1515+1616+type lock_info = {
1717+ stage : stage;
1818+ package : string;
1919+ version : string;
2020+ universe : string option; (* For Build/Doc: dependency hash. For Tool: OCaml version if applicable *)
2121+ pid : int;
2222+ start_time : float;
2323+ layer_name : string option; (* Final layer directory name for finding logs after completion *)
2424+ temp_log_path : string option; (* Temp log path for viewing live logs during build *)
2525+}
2626+2727+let locks_subdir = "locks"
(** Parse lock filename to extract info.
    Recognised shapes (before the mandatory [.lock] suffix):
    - [build-{package}.{version}[-{universe}]]
    - [doc-{package}.{version}[-{universe}]]
    - [tool-{name}[-{ocaml_version}]]
    The universe, when present, is the 32-hex-char hash after the last
    dash; an OCaml version suffix is detected heuristically by the
    presence of a dot. Tool locks report version ["0"].
    Returns [None] if the filename matches none of the patterns. *)
let parse_lock_filename filename =
  if not (Filename.check_suffix filename ".lock") then None
  else begin
    let base = Filename.chop_suffix filename ".lock" in
    (* Split "{package}.{version}" on the last dot. *)
    let split_pkg_ver s =
      match String.rindex_opt s '.' with
      | None -> None
      | Some dot ->
          Some
            ( String.sub s 0 dot,
              String.sub s (dot + 1) (String.length s - dot - 1) )
    in
    (* Parse "{package}.{version}[-{universe}]"; a universe is exactly the
       32 characters after the last dash. *)
    let parse_pkg_ver_universe rest =
      match String.rindex_opt rest '-' with
      | Some dash when String.length rest - dash - 1 = 32 ->
          let universe = String.sub rest (dash + 1) 32 in
          split_pkg_ver (String.sub rest 0 dash)
          |> Option.map (fun (pkg, ver) -> (pkg, ver, Some universe))
      | _ ->
          split_pkg_ver rest
          |> Option.map (fun (pkg, ver) -> (pkg, ver, None))
    in
    (* [has p] also requires a non-empty remainder after the prefix,
       matching the original strict length checks. *)
    let has prefix =
      String.length base > String.length prefix
      && String.starts_with ~prefix base
    in
    let after prefix =
      String.sub base (String.length prefix)
        (String.length base - String.length prefix)
    in
    if has "build-" then
      parse_pkg_ver_universe (after "build-")
      |> Option.map (fun (p, v, u) -> (Build, p, v, u))
    else if has "doc-" then
      parse_pkg_ver_universe (after "doc-")
      |> Option.map (fun (p, v, u) -> (Doc, p, v, u))
    else if has "tool-" then begin
      let rest = after "tool-" in
      match String.rindex_opt rest '-' with
      | Some dash ->
          let name = String.sub rest 0 dash in
          let ver =
            String.sub rest (dash + 1) (String.length rest - dash - 1)
          in
          (* Simple check: OCaml versions contain dots *)
          if String.contains ver '.' then Some (Tool, name, "0", Some ver)
          else
            (* Not an OCaml version, treat whole thing as name *)
            Some (Tool, rest, "0", None)
      | None -> Some (Tool, rest, "0", None)
    end
    else None
  end
(** Check if a lock file is currently held (locked by another process).
    Opens the file read-only and probes with [lockf F_TEST]; EAGAIN/EACCES
    from the probe mean "held". Any error opening the file yields [false].
    NOTE(review): if the probe raises an unexpected [Unix_error] the fd is
    not closed before the outer handler returns [false] — same as the
    original behavior; confirm whether that leak matters in practice. *)
let is_lock_held lock_path =
  try
    let fd = Unix.openfile lock_path [ Unix.O_RDONLY ] 0o644 in
    let held =
      match Unix.lockf fd Unix.F_TEST 0 with
      | () -> false
      | exception Unix.Unix_error ((Unix.EAGAIN | Unix.EACCES), _, _) -> true
    in
    Unix.close fd;
    held
  with Unix.Unix_error _ -> false
(** List all currently held locks.
    Returns lock info for each active lock; any unreadable or malformed
    entry is silently skipped. *)
let list_active ~cache_dir =
  let locks_dir = Filename.concat cache_dir locks_subdir in
  if not (Sys.file_exists locks_dir) then []
  else
    (* Lock file format:
       Line 1: PID
       Line 2: start time
       Line 3: layer name (may be empty)
       Line 4: temp log path (may be empty) *)
    let read_info path (stage, package, version, universe) =
      try
        let content = In_channel.with_open_text path In_channel.input_all in
        match String.split_on_char '\n' content with
        | pid_line :: time_line :: extra ->
            let non_empty s =
              let s = String.trim s in
              if s = "" then None else Some s
            in
            Some
              {
                stage; package; version; universe;
                pid = int_of_string (String.trim pid_line);
                start_time = float_of_string (String.trim time_line);
                layer_name =
                  (match extra with l :: _ -> non_empty l | [] -> None);
                temp_log_path =
                  (match extra with _ :: l :: _ -> non_empty l | _ -> None);
              }
        | _ -> None
      with _ -> None
    in
    try
      Sys.readdir locks_dir
      |> Array.to_list
      |> List.filter (fun name -> Filename.check_suffix name ".lock")
      |> List.filter_map (fun filename ->
             let path = Filename.concat locks_dir filename in
             match parse_lock_filename filename with
             | Some parsed when is_lock_held path -> read_info path parsed
             | _ -> None)
    with _ -> []
(** Clean up stale lock files (files that exist but aren't locked).
    Safe to run at any time — only unlocked files are removed; a missing
    locks directory is a no-op. *)
let cleanup_stale ~cache_dir =
  let locks_dir = Filename.concat cache_dir locks_subdir in
  match Sys.readdir locks_dir with
  | exception _ -> ()
  | entries ->
      Array.iter
        (fun filename ->
          if Filename.check_suffix filename ".lock" then begin
            let path = Filename.concat locks_dir filename in
            if not (is_lock_held path) then
              try Unix.unlink path with _ -> ()
          end)
        entries
+26
day10/lib/build_lock.mli
···11+(** Lock files for tracking in-progress builds, doc generation, and tool builds.
22+33+ Lock files are automatically released when the process dies,
44+ providing reliable "in progress" detection without stale state.
55+66+ This module provides query functions for the web UI. The actual lock
77+ acquisition is done by Os.create_directory_exclusively in bin/os.ml. *)
88+99+type stage = Build | Doc | Tool
1010+1111+type lock_info = {
1212+ stage : stage;
1313+ package : string;
1414+ version : string;
1515+ universe : string option;
1616+ pid : int;
1717+ start_time : float;
1818+ layer_name : string option; (** Final layer directory name *)
1919+ temp_log_path : string option; (** Temp log path for live viewing *)
2020+}
2121+2222+(** List all currently held locks. *)
2323+val list_active : cache_dir:string -> lock_info list
2424+2525+(** Clean up stale lock files (unlocked files left from previous runs). *)
2626+val cleanup_stale : cache_dir:string -> unit
···11+(** Garbage collection for layer cache and universe directories.
22+33+ Layer GC: After each batch run, delete layers not referenced by current
44+ solutions. This is aggressive - we don't keep history since regeneration
55+ is fast with layer caching.
66+77+ Universe GC: Delete universe directories not referenced by any blessed
88+ package's universes.json file. This preserves universes until all their
99+ packages have successfully moved to new universes. *)
1010+1111+(** Types for GC results *)
1212+type layer_gc_result = {
1313+ referenced : int;
1414+ deleted : int;
1515+ kept : string list; (** Special layers that are always kept *)
1616+}
1717+1818+type universe_gc_result = {
1919+ referenced : int;
2020+ deleted : int;
2121+}
2222+2323+let log fmt = Printf.ksprintf (fun msg ->
2424+ Printf.printf "[gc] %s\n%!" msg
2525+) fmt
(* Remove [path] recursively via the shell, escalating to sudo if the
   plain rm fails (e.g. root-owned files from container builds).
   NOTE(review): the sudo fallback could block on a password prompt when
   passwordless sudo is not configured — confirm deployment assumptions. *)
let rm_rf path =
  let quoted = Filename.quote path in
  if Sys.command (Printf.sprintf "rm -rf %s 2>/dev/null" quoted) <> 0 then
    ignore (Sys.command (Printf.sprintf "sudo rm -rf %s" quoted))
(** List all layer directories in the cache.
    Returns (layer_name, full_path) pairs; empty when the cache directory
    for [os_key] is missing or unreadable. *)
let list_layers ~cache_dir ~os_key =
  let layer_dir = Filename.concat cache_dir os_key in
  match Sys.is_directory layer_dir with
  | exception _ -> []
  | false -> []
  | true -> (
      try
        Sys.readdir layer_dir
        |> Array.to_list
        |> List.filter_map (fun name ->
               let path = Filename.concat layer_dir name in
               if Sys.is_directory path then Some (name, path) else None)
      with _ -> [])
(** Check if a layer is a special layer that should always be kept.
    Special layers:
    - "base": the base image
    - "solutions": the solution cache directory
    - "doc-driver-*": shared doc driver layer
    - "doc-odoc-*": per-version odoc layers
    - "jtw-tools-*": tool layers
    A bare prefix with nothing after it (e.g. "doc-odoc-") is NOT special,
    matching the original strict length checks. *)
let is_special_layer name =
  name = "base"
  || name = "solutions"
  || List.exists
       (fun prefix ->
         String.length name > String.length prefix
         && String.starts_with ~prefix name)
       [ "doc-driver-"; "doc-odoc-"; "jtw-tools-" ]
(** Perform layer GC.
    [referenced_hashes] should be a list of build-{hash} and doc-{hash}
    layer names that are currently in use. All other non-special layers
    are deleted. Membership is checked via a hash table so the scan stays
    linear even with thousands of layers. *)
let gc_layers ~cache_dir ~os_key ~referenced_hashes =
  (* O(1) membership instead of List.mem per layer. *)
  let referenced = Hashtbl.create (2 * List.length referenced_hashes + 1) in
  List.iter (fun h -> Hashtbl.replace referenced h ()) referenced_hashes;
  let kept_special = ref [] in
  let deleted_count = ref 0 in
  List.iter
    (fun (name, path) ->
      if is_special_layer name then
        kept_special := name :: !kept_special
      else if not (Hashtbl.mem referenced name) then begin
        (* Not referenced - delete it *)
        log "Deleting unreferenced layer: %s" name;
        rm_rf path;
        incr deleted_count
      end)
    (list_layers ~cache_dir ~os_key);
  {
    referenced = List.length referenced_hashes;
    deleted = !deleted_count;
    kept = !kept_special;
  }
(** Collect all universe hashes referenced by blessed packages.
    Scans html/p/{pkg}/{version}/universes.json files and returns the
    sorted, de-duplicated list of 32-hex-char hashes found in them. *)
let collect_referenced_universes ~html_dir =
  (* Extract every quoted lowercase-hex string of exactly 32 chars from
     [content]. Expected format: {"universes": ["hash1", "hash2", ...]} *)
  let hashes_of_json content =
    let regex = Str.regexp {|"[a-f0-9]+"|} in
    let rec scan pos acc =
      match Str.search_forward regex content pos with
      | exception Not_found -> acc
      | _ ->
          let quoted = Str.matched_string content in
          let hash = String.sub quoted 1 (String.length quoted - 2) in
          let acc = if String.length hash = 32 then hash :: acc else acc in
          scan (Str.match_end ()) acc
    in
    scan 0 []
  in
  let universes = ref [] in
  let p_dir = Filename.concat html_dir "p" in
  if Sys.file_exists p_dir && Sys.is_directory p_dir then begin
    try
      Sys.readdir p_dir
      |> Array.iter (fun pkg_name ->
             let pkg_dir = Filename.concat p_dir pkg_name in
             if Sys.is_directory pkg_dir then
               try
                 Sys.readdir pkg_dir
                 |> Array.iter (fun version ->
                        let version_dir = Filename.concat pkg_dir version in
                        if Sys.is_directory version_dir then begin
                          let universes_file =
                            Filename.concat version_dir "universes.json"
                          in
                          if Sys.file_exists universes_file then
                            try
                              let content =
                                In_channel.with_open_text universes_file
                                  In_channel.input_all
                              in
                              universes := hashes_of_json content @ !universes
                            with _ -> ()
                        end)
               with _ -> ())
    with _ -> ()
  end;
  List.sort_uniq String.compare !universes
(** Check if a universe directory contains any package docs.
    Returns [true] if the universe has at least one package directory
    holding at least one version subdirectory; unreadable paths count
    as empty. *)
let universe_has_content universe_path =
  (* Does [pkg_path] contain any version subdirectory? *)
  let has_version_dir pkg_path =
    try
      Array.exists
        (fun version -> Sys.is_directory (Filename.concat pkg_path version))
        (Sys.readdir pkg_path)
    with _ -> false
  in
  try
    Array.exists
      (fun pkg_name ->
        let pkg_path = Filename.concat universe_path pkg_name in
        Sys.is_directory pkg_path && has_version_dir pkg_path)
      (Sys.readdir universe_path)
  with _ -> false
(** Perform universe GC.
    Deletes universe directories in html/u/ that:
    1. Are not referenced by any blessed package's universes.json file, AND
    2. Are empty (no package docs inside).

    This prevents deletion of non-blessed packages' docs, which would
    otherwise be lost since they don't write universes.json references. *)
let gc_universes ~html_dir =
  let referenced = collect_referenced_universes ~html_dir in
  let u_dir = Filename.concat html_dir "u" in
  let deleted_count = ref 0 in
  let kept_with_content = ref 0 in
  (if Sys.file_exists u_dir && Sys.is_directory u_dir then
     try
       Array.iter
         (fun universe_hash ->
           let path = Filename.concat u_dir universe_hash in
           if Sys.is_directory path
              && not (List.mem universe_hash referenced)
           then
             if universe_has_content path then
               (* Unreferenced but non-empty: non-blessed package docs. *)
               incr kept_with_content
             else begin
               log "Deleting empty unreferenced universe: %s" universe_hash;
               rm_rf path;
               incr deleted_count
             end)
         (Sys.readdir u_dir)
     with _ -> ());
  if !kept_with_content > 0 then
    log "Kept %d universes with non-blessed package docs" !kept_with_content;
  { referenced = List.length referenced; deleted = !deleted_count }
(** Perform full GC (layers + universes), logging a summary of each pass. *)
let gc_all ~cache_dir ~os_key ~html_dir ~referenced_layer_hashes =
  log "Starting garbage collection...";
  let layer_result =
    gc_layers ~cache_dir ~os_key ~referenced_hashes:referenced_layer_hashes
  in
  log "Layer GC: %d referenced, %d deleted, %d special layers kept"
    layer_result.referenced layer_result.deleted
    (List.length layer_result.kept);
  let universe_result = gc_universes ~html_dir in
  log "Universe GC: %d referenced, %d deleted"
    universe_result.referenced universe_result.deleted;
  (layer_result, universe_result)
+31
day10/lib/gc.mli
···11+(** Garbage collection for layer cache and universe directories. *)
22+33+(** Types for GC results *)
44+type layer_gc_result = {
55+ referenced : int;
66+ deleted : int;
77+ kept : string list;
88+}
99+1010+type universe_gc_result = {
1111+ referenced : int;
1212+ deleted : int;
1313+}
1414+1515+(** Check if a layer is a special layer that should always be kept. *)
1616+val is_special_layer : string -> bool
1717+1818+(** List all layer directories in the cache. *)
1919+val list_layers : cache_dir:string -> os_key:string -> (string * string) list
2020+2121+(** Perform layer GC. *)
2222+val gc_layers : cache_dir:string -> os_key:string -> referenced_hashes:string list -> layer_gc_result
2323+2424+(** Collect all universe hashes referenced by blessed packages. *)
2525+val collect_referenced_universes : html_dir:string -> string list
2626+2727+(** Perform universe GC. *)
2828+val gc_universes : html_dir:string -> universe_gc_result
2929+3030+(** Perform full GC (layers + universes). *)
3131+val gc_all : cache_dir:string -> os_key:string -> html_dir:string -> referenced_layer_hashes:string list -> layer_gc_result * universe_gc_result
+99
day10/lib/progress.ml
···11+(** Progress tracking for batch builds.
22+33+ Writes progress.json early (after solving phase) and updates it during
44+ the build phase so the dashboard can show real-time progress.
55+*)
66+77+(** Build phases *)
88+type phase =
99+ | Solving
1010+ | Blessings
1111+ | Building
1212+ | Gc
1313+ | Completed
(* Serialise a phase to the string written into progress.json. *)
let phase_to_string phase =
  match phase with
  | Solving -> "solving"
  | Blessings -> "blessings"
  | Building -> "building"
  | Gc -> "gc"
  | Completed -> "completed"
2121+2222+(** Progress state *)
2323+type t = {
2424+ run_id : string;
2525+ start_time : string;
2626+ phase : phase;
2727+ targets : string list;
2828+ solutions_found : int;
2929+ solutions_failed : int;
3030+ build_completed : int;
3131+ build_total : int;
3232+ doc_completed : int;
3333+ doc_total : int;
3434+}
(** Create initial progress state: solving phase, every counter at zero. *)
let create ~run_id ~start_time ~targets =
  {
    run_id; start_time; targets;
    phase = Solving;
    solutions_found = 0;
    solutions_failed = 0;
    build_completed = 0;
    build_total = 0;
    doc_completed = 0;
    doc_total = 0;
  }

(** Update the phase *)
let set_phase t phase = { t with phase }

(** Update solutions count *)
let set_solutions t ~found ~failed =
  { t with solutions_found = found; solutions_failed = failed }

(** Update build totals (call when entering build phase).
    Docs are produced per built package, so the doc total mirrors it. *)
let set_build_total t total = { t with build_total = total; doc_total = total }

(** Increment build completed count *)
let incr_build_completed t = { t with build_completed = succ t.build_completed }

(** Increment doc completed count *)
let incr_doc_completed t = { t with doc_completed = succ t.doc_completed }

(** Set both build and doc completed (for sequential updates) *)
let set_completed t ~build ~doc =
  { t with build_completed = build; doc_completed = doc }
(** Convert progress to the JSON object written to progress.json. *)
let to_json t =
  let str s = `String s in
  `Assoc
    [
      ("run_id", str t.run_id);
      ("start_time", str t.start_time);
      ("phase", str (phase_to_string t.phase));
      ("targets", `List (List.map str t.targets));
      ("solutions_found", `Int t.solutions_found);
      ("solutions_failed", `Int t.solutions_failed);
      ("build_completed", `Int t.build_completed);
      ("build_total", `Int t.build_total);
      ("doc_completed", `Int t.doc_completed);
      ("doc_total", `Int t.doc_total);
    ]
(** Write progress to [run_dir]/progress.json atomically (temp + rename),
    so dashboard readers never observe a partially written file.
    If writing the temp file fails, the stale .tmp is removed before the
    exception is re-raised. *)
let write ~run_dir t =
  let path = Filename.concat run_dir "progress.json" in
  let temp_path = path ^ ".tmp" in
  let content = Yojson.Safe.pretty_to_string (to_json t) in
  (try
     Out_channel.with_open_text temp_path (fun oc ->
         Out_channel.output_string oc content)
   with e ->
     (* Don't leave a half-written .tmp lying around on failure. *)
     (try Unix.unlink temp_path with Unix.Unix_error _ -> ());
     raise e);
  Unix.rename temp_path path
(** Remove [run_dir]/progress.json once the run is complete; a missing
    file is not an error. *)
let delete ~run_dir =
  match Unix.unlink (Filename.concat run_dir "progress.json") with
  | () -> ()
  | exception Unix.Unix_error _ -> ()
+188
day10/lib/run_log.ml
···11+(** Run logging for batch processing.
22+33+ Manages timestamp-based run directories with structured logs:
44+ - runs/{id}/summary.json
55+ - runs/{id}/build/{package}.log
66+ - runs/{id}/docs/{package}.log
77+ - latest -> runs/{id} (symlink)
88+*)
99+1010+(** Run metadata *)
1111+type t = {
1212+ id : string;
1313+ start_time : float;
1414+ mutable end_time : float option; [@warning "-69"]
1515+ run_dir : string;
1616+}
1717+1818+(** Summary data *)
1919+type summary = {
2020+ run_id : string;
2121+ start_time : string;
2222+ end_time : string;
2323+ duration_seconds : float;
2424+ targets_requested : int;
2525+ solutions_found : int;
2626+ build_success : int;
2727+ build_failed : int;
2828+ doc_success : int;
2929+ doc_failed : int;
3030+ doc_skipped : int;
3131+ failures : (string * string) list; (** (package, error) pairs *)
3232+}
3333+3434+let log_base_dir = ref "/var/log/day10"
3535+3636+let set_log_base_dir dir = log_base_dir := dir
(** Generate a run ID from the current local time: YYYY-MM-DD-HHMMSS. *)
let generate_run_id () =
  let tm = Unix.localtime (Unix.gettimeofday ()) in
  Printf.sprintf "%04d-%02d-%02d-%02d%02d%02d"
    (1900 + tm.Unix.tm_year) (1 + tm.Unix.tm_mon) tm.Unix.tm_mday
    tm.Unix.tm_hour tm.Unix.tm_min tm.Unix.tm_sec
(** Format a Unix timestamp as a local-time ISO 8601 string
    (YYYY-MM-DDTHH:MM:SS, no timezone suffix). *)
let format_time t =
  let tm = Unix.localtime t in
  Printf.sprintf "%04d-%02d-%02dT%02d:%02d:%02d"
    (1900 + tm.Unix.tm_year) (1 + tm.Unix.tm_mon) tm.Unix.tm_mday
    tm.Unix.tm_hour tm.Unix.tm_min tm.Unix.tm_sec
(** Accessor functions *)
let get_id (t : t) = t.id
let get_run_dir (t : t) = t.run_dir
let get_start_time (t : t) = t.start_time

(** Create [path] and any missing parent directories (like mkdir -p);
    concurrent creation (EEXIST) is tolerated. *)
let mkdir_p path =
  let rec ensure dir =
    if not (Sys.file_exists dir) then begin
      ensure (Filename.dirname dir);
      match Unix.mkdir dir 0o755 with
      | () -> ()
      | exception Unix.Unix_error (Unix.EEXIST, _, _) -> ()
    end
  in
  ensure path
(** Start a new run: creates runs/{id} plus its build/ and docs/
    subdirectories under the configured log base dir. *)
let start_run () =
  let id = generate_run_id () in
  let run_dir = Filename.concat (Filename.concat !log_base_dir "runs") id in
  List.iter mkdir_p
    [ run_dir;
      Filename.concat run_dir "build";
      Filename.concat run_dir "docs" ];
  { id; start_time = Unix.gettimeofday (); end_time = None; run_dir }
(** Point <base>/latest at runs/<id>. Best effort: failure only warns. *)
let update_latest_symlink run_info =
  let link_path = Filename.concat !log_base_dir "latest" in
  let link_target = Filename.concat "runs" run_info.id in
  (* Drop any stale link first so symlink cannot fail with EEXIST. *)
  (try Unix.unlink link_path with Unix.Unix_error _ -> ());
  try Unix.symlink link_target link_path
  with Unix.Unix_error (err, _, _) ->
    Printf.eprintf "[run_log] Warning: failed to create latest symlink: %s\n%!"
      (Unix.error_message err)
(** Add a build log to the run directory (symlink or copy).

    Fix: remove any pre-existing destination first, mirroring [add_doc_log].
    Without it a re-run hits EEXIST on [Unix.symlink] and falls into the
    copy branch, which then writes *through* the stale symlink and clobbers
    the source log. Missing sources are ignored; copy errors are best-effort. *)
let add_build_log run_info ~package ~source_log =
  let dest = Filename.concat run_info.run_dir
      (Filename.concat "build" (package ^ ".log")) in
  if Sys.file_exists source_log then begin
    (* Remove existing symlink/file if present to avoid EEXIST *)
    (try Unix.unlink dest with Unix.Unix_error _ -> ());
    (* Try symlink first, fall back to copy *)
    try
      Unix.symlink source_log dest
    with Unix.Unix_error _ ->
      try
        let content = In_channel.with_open_text source_log In_channel.input_all in
        Out_channel.with_open_text dest (fun oc -> Out_channel.output_string oc content)
      with _ -> ()
  end
(** Add a doc log to the run directory (symlink or copy).
    Optional [layer_hash] adds a suffix for non-blessed docs with
    different universes. *)
let add_doc_log run_info ~package ~source_log ?layer_hash () =
  let base = match layer_hash with
    | None -> package
    | Some hash -> package ^ "." ^ hash
  in
  let dest =
    Filename.concat run_info.run_dir (Filename.concat "docs" (base ^ ".log"))
  in
  if Sys.file_exists source_log then begin
    (* Clear any previous entry so Unix.symlink cannot fail with EEXIST. *)
    (try Unix.unlink dest with Unix.Unix_error _ -> ());
    try Unix.symlink source_log dest
    with Unix.Unix_error _ ->
      (* Symlink refused (e.g. filesystem policy): copy the bytes instead. *)
      (try
         let data = In_channel.with_open_text source_log In_channel.input_all in
         Out_channel.with_open_text dest
           (fun oc -> Out_channel.output_string oc data)
       with _ -> ())
  end
(** Serialize a [summary] to the JSON shape consumed by the dashboard. *)
let summary_to_json summary =
  let failure_to_json (pkg, err) =
    `Assoc [("package", `String pkg); ("error", `String err)]
  in
  `Assoc [
    ("run_id", `String summary.run_id);
    ("start_time", `String summary.start_time);
    ("end_time", `String summary.end_time);
    ("duration_seconds", `Float summary.duration_seconds);
    ("targets_requested", `Int summary.targets_requested);
    ("solutions_found", `Int summary.solutions_found);
    ("build_success", `Int summary.build_success);
    ("build_failed", `Int summary.build_failed);
    ("doc_success", `Int summary.doc_success);
    ("doc_failed", `Int summary.doc_failed);
    ("doc_skipped", `Int summary.doc_skipped);
    ("failures", `List (List.map failure_to_json summary.failures));
  ]
(** Write summary.json (pretty-printed) into the run directory. *)
let write_summary run_info summary =
  let path = Filename.concat run_info.run_dir "summary.json" in
  let json = summary_to_json summary in
  let content = Yojson.Safe.pretty_to_string json in
  Out_channel.with_open_text path (fun oc -> Out_channel.output_string oc content)

(** Finish a run: record the end time on the handle, persist summary.json
    and repoint the 'latest' symlink. Returns the summary that was written. *)
let finish_run (run_info : t) ~targets_requested ~solutions_found
    ~build_success ~build_failed ~doc_success ~doc_failed ~doc_skipped
    ~failures =
  let finish_time = Unix.gettimeofday () in
  (* end_time is a mutable field on the handle; callers may inspect it later. *)
  run_info.end_time <- Some finish_time;
  let summary : summary = {
    run_id = run_info.id;
    start_time = format_time run_info.start_time;
    end_time = format_time finish_time;
    duration_seconds = finish_time -. run_info.start_time;
    targets_requested;
    solutions_found;
    build_success;
    build_failed;
    doc_success;
    doc_failed;
    doc_skipped;
    failures;
  } in
  write_summary run_info summary;
  update_latest_symlink run_info;
  summary
+62
day10/lib/run_log.mli
(** Run logging for batch processing.

    Manages timestamp-based run directories with structured logs. *)

(** Run metadata (abstract handle for an in-flight run) *)
type t

(** Summary data written to summary.json when a run finishes *)
type summary = {
  run_id : string;
  start_time : string;   (** local time, ISO 8601 *)
  end_time : string;     (** local time, ISO 8601 *)
  duration_seconds : float;
  targets_requested : int;
  solutions_found : int;
  build_success : int;
  build_failed : int;
  doc_success : int;
  doc_failed : int;
  doc_skipped : int;
  failures : (string * string) list;  (** (package, error) pairs *)
}

(** Set the base directory for logs (default: /var/log/day10) *)
val set_log_base_dir : string -> unit

(** Start a new run - creates directory structure *)
val start_run : unit -> t

(** Get the run ID *)
val get_id : t -> string

(** Get the run directory path *)
val get_run_dir : t -> string

(** Get the start time as Unix timestamp *)
val get_start_time : t -> float

(** Format Unix timestamp as ISO 8601 string *)
val format_time : float -> string

(** Add a build log to the run directory *)
val add_build_log : t -> package:string -> source_log:string -> unit

(** Add a doc log to the run directory.
    Optional [layer_hash] adds a suffix for non-blessed docs with different universes. *)
val add_doc_log : t -> package:string -> source_log:string -> ?layer_hash:string -> unit -> unit

(** Finish a run - write summary and update latest symlink *)
val finish_run : t ->
  targets_requested:int ->
  solutions_found:int ->
  build_success:int ->
  build_failed:int ->
  doc_success:int ->
  doc_failed:int ->
  doc_skipped:int ->
  failures:(string * string) list ->
  summary

(** Convert summary to JSON *)
val summary_to_json : summary -> Yojson.Safe.t
(** Read layer info for packages from day10's cache directory.
    Uses the packages/{pkg}/ directory structure with symlinks:
    - build-{hash} -> ../../build-{hash} (all builds)
    - doc-{hash} -> ../../doc-{hash} (all docs)
    - blessed-build -> ../../build-{hash} (canonical build if blessed)
    - blessed-docs -> ../../doc-{hash} (canonical docs if blessed)
    Falls back to scanning build-* directories if no symlinks exist. *)

(* Metadata parsed from a layer.json file. *)
type layer_info = {
  package: string;      (* "package" field; compared against packages/ dir names *)
  deps: string list;    (* string entries of "deps"; non-strings are dropped *)
  created: float;       (* "created" field - Unix timestamp per the .mli *)
  exit_status: int;     (* "exit_status" field; semantics recorded by the builder *)
}
1515+1616+(** Read layer.json from a directory and parse it *)
1717+let read_layer_json path =
1818+ if Sys.file_exists path then
1919+ try
2020+ let content = In_channel.with_open_text path In_channel.input_all in
2121+ let json = Yojson.Safe.from_string content in
2222+ let open Yojson.Safe.Util in
2323+ (* Handle deps which may have OpamPackage objects or strings *)
2424+ let deps_list = json |> member "deps" |> to_list in
2525+ let deps = deps_list |> List.filter_map (fun d ->
2626+ match d with
2727+ | `String s -> Some s
2828+ | _ -> None (* Skip non-string deps *)
2929+ ) in
3030+ Some {
3131+ package = json |> member "package" |> to_string;
3232+ deps;
3333+ created = json |> member "created" |> to_float;
3434+ exit_status = json |> member "exit_status" |> to_int;
3535+ }
3636+ with _ -> None
3737+ else
3838+ None
(** Follow a symlink and read layer.json from the directory it points to.
    Returns [None] when the link is missing, unreadable, or the target has
    no parseable layer.json.

    Fix: an absolute symlink target must be used as-is — resolving it with
    [Filename.concat] against the link's directory would corrupt the path.
    Relative targets (the normal "../../build-abc123" case) are resolved
    against the link's own directory as before. *)
let read_layer_via_symlink symlink_path =
  if Sys.file_exists symlink_path then
    try
      let target = Unix.readlink symlink_path in
      let layer_dir =
        if Filename.is_relative target then
          Filename.concat (Filename.dirname symlink_path) target
        else target
      in
      read_layer_json (Filename.concat layer_dir "layer.json")
    with Unix.Unix_error _ -> None
  else
    None
(** Get layer info for a package.
    Resolution order:
    1. the blessed-build symlink in packages/<pkg>/,
    2. any build-* symlink there (lexically greatest name first),
    3. a scan of platform-level build-* directories for a layer.json whose
       "package" field matches.

    Fix: the inner [match] must be parenthesized — unparenthesized it
    extends over the following [else], which is a syntax error. *)
let get_package_layer ~cache_dir ~platform ~package =
  let pkg_dir = Filename.concat cache_dir
      (Filename.concat platform
         (Filename.concat "packages" package)) in
  (* Try blessed-build first *)
  let blessed_build = Filename.concat pkg_dir "blessed-build" in
  match read_layer_via_symlink blessed_build with
  | Some info -> Some info
  | None ->
    (* Try to find any build-* symlink in the package directory *)
    if Sys.file_exists pkg_dir && Sys.is_directory pkg_dir then
      let build_symlinks =
        Sys.readdir pkg_dir
        |> Array.to_list
        |> List.filter (fun name ->
            String.length name > 6 && String.sub name 0 6 = "build-")
        |> List.sort (fun a b -> String.compare b a) (* Most recent first by hash *)
      in
      (match build_symlinks with
       | first :: _ -> read_layer_via_symlink (Filename.concat pkg_dir first)
       | [] -> None)
    else
      (* No package directory - fall back to scanning build-* directories *)
      let platform_dir = Filename.concat cache_dir platform in
      if Sys.file_exists platform_dir && Sys.is_directory platform_dir then
        Sys.readdir platform_dir
        |> Array.to_list
        |> List.filter (fun name ->
            String.length name > 6 && String.sub name 0 6 = "build-")
        |> List.find_map (fun build_dir ->
            let layer_json = Filename.concat platform_dir
                (Filename.concat build_dir "layer.json") in
            match read_layer_json layer_json with
            | Some info when info.package = package -> Some info
            | _ -> None)
      else
        None
(** List all packages with layer info (for computing reverse deps).
    Returns list of (package_name, layer_info) pairs.
    Uses packages/ directory structure - each subdirectory is a package;
    entries whose layer info cannot be resolved are dropped. *)
let list_all_packages ~cache_dir ~platform =
  let packages_dir = Filename.concat cache_dir
      (Filename.concat platform "packages") in
  if Sys.file_exists packages_dir && Sys.is_directory packages_dir then
    Sys.readdir packages_dir
    |> Array.to_list
    |> List.filter (fun name ->
        (* Each entry should be a directory (package.version) *)
        let path = Filename.concat packages_dir name in
        Sys.is_directory path)
    |> List.filter_map (fun package ->
        match get_package_layer ~cache_dir ~platform ~package with
        | Some info -> Some (package, info)
        | None -> None)
  else
    (* Fall back to scanning build-* directories.
       NOTE(review): here the key comes from layer.json's "package" field,
       so a package may appear once per build directory — confirm callers
       tolerate duplicates. *)
    let platform_dir = Filename.concat cache_dir platform in
    if Sys.file_exists platform_dir && Sys.is_directory platform_dir then
      Sys.readdir platform_dir
      |> Array.to_list
      |> List.filter (fun name -> String.length name > 6 && String.sub name 0 6 = "build-")
      |> List.filter_map (fun build_dir ->
          let layer_json = Filename.concat platform_dir
              (Filename.concat build_dir "layer.json") in
          match read_layer_json layer_json with
          | Some info -> Some (info.package, info)
          | None -> None)
    else
      []
(** Compute reverse dependencies: names of packages whose recorded deps
    include [package], sorted ascending. *)
let get_reverse_deps ~cache_dir ~platform ~package =
  let depends_on_target (name, info) =
    if List.mem package info.deps then Some name else None
  in
  list_all_packages ~cache_dir ~platform
  |> List.filter_map depends_on_target
  |> List.sort String.compare
+19
day10/web/data/layer_data.mli
(** Read layer info for packages from day10's cache directory *)

type layer_info = {
  package: string;
  deps: string list;
  created: float;      (** Unix timestamp *)
  exit_status: int;    (** exit status as recorded in layer.json *)
}

(** Get layer info for a specific package.
    Uses symlink if available, falls back to scanning build-* directories. *)
val get_package_layer : cache_dir:string -> platform:string -> package:string -> layer_info option

(** List all packages with their layer info.
    Used for computing reverse dependencies. *)
val list_all_packages : cache_dir:string -> platform:string -> (string * layer_info) list

(** Get reverse dependencies: packages that depend on the given package. *)
val get_reverse_deps : cache_dir:string -> platform:string -> package:string -> string list
+50
day10/web/data/lock_data.ml
(** Read active build/doc/tool locks from day10's cache directory. *)

(* Which pipeline stage holds the lock. *)
type stage = Build | Doc | Tool

(* Web-friendly snapshot of one held lock (see [of_lib_lock]). *)
type active_lock = {
  stage : stage;
  package : string;
  version : string;
  universe : string option; (* For Build/Doc: dependency hash. For Tool: OCaml version if applicable *)
  pid : int;                (* process holding the lock *)
  start_time : float;       (* Unix timestamp the lock was taken *)
  duration : float;         (* seconds since start, at snapshot time *)
  layer_name : string option; (* Final layer directory name *)
  temp_log_path : string option; (* Temp log path for live viewing *)
}
(** Convert library lock record to web-friendly format.
    [duration] is computed against the clock at call time, so the value
    is a snapshot and grows stale. *)
let of_lib_lock (lock : Day10_lib.Build_lock.lock_info) =
  let now = Unix.time () in
  let stage = match lock.stage with
    | Day10_lib.Build_lock.Build -> Build
    | Day10_lib.Build_lock.Doc -> Doc
    | Day10_lib.Build_lock.Tool -> Tool
  in
  {
    stage;
    package = lock.package;
    version = lock.version;
    universe = lock.universe;
    pid = lock.pid;
    start_time = lock.start_time;
    duration = now -. lock.start_time;
    layer_name = lock.layer_name;
    temp_log_path = lock.temp_log_path;
  }
(** All locks currently held under [cache_dir], in web form. *)
let list_active_locks ~cache_dir =
  List.map of_lib_lock (Day10_lib.Build_lock.list_active ~cache_dir)

(** True when at least one build/doc/tool lock is currently held. *)
let has_active_locks ~cache_dir =
  Day10_lib.Build_lock.list_active ~cache_dir <> []
(** Render a second count as "Ns", "MmSs" or "HhMm", truncating the float
    toward zero first. *)
let format_duration seconds =
  let total = int_of_float seconds in
  if total >= 3600 then
    Printf.sprintf "%dh%dm" (total / 3600) (total mod 3600 / 60)
  else if total >= 60 then
    Printf.sprintf "%dm%ds" (total / 60) (total mod 60)
  else
    Printf.sprintf "%ds" total
+24
day10/web/data/lock_data.mli
(** Read active build/doc/tool locks from day10's cache directory *)

type stage = Build | Doc | Tool

type active_lock = {
  stage : stage;
  package : string;
  version : string;
  universe : string option; (** For Build/Doc: dependency hash. For Tool: OCaml version if applicable *)
  pid : int;
  start_time : float;       (** Unix timestamp the lock was taken *)
  duration : float;         (** seconds since start, at snapshot time *)
  layer_name : string option; (** Final layer directory name *)
  temp_log_path : string option; (** Temp log path for live viewing *)
}

(** List all currently active locks *)
val list_active_locks : cache_dir:string -> active_lock list

(** Check if there are any active locks *)
val has_active_locks : cache_dir:string -> bool

(** Format duration in human-readable form *)
val format_duration : float -> string
+58
day10/web/data/package_data.ml
(** Read package data from day10's html directory *)

(** Names of all package directories under <html_dir>/p, sorted ascending;
    [] when the directory is missing. *)
let list_package_names ~html_dir =
  let p_dir = Filename.concat html_dir "p" in
  if not (Sys.file_exists p_dir && Sys.is_directory p_dir) then []
  else
    Sys.readdir p_dir
    |> Array.to_list
    |> List.filter (fun entry ->
        Sys.is_directory (Filename.concat p_dir entry))
    |> List.sort String.compare
(** Compare two dotted version strings for *descending* order: segments
    compare numerically when both parse as ints, lexicographically when
    both are text, and numeric segments sort before textual ones. A prefix
    sorts before its extensions. *)
let compare_versions v1 v2 =
  let segment s = try `Int (int_of_string s) with _ -> `Str s in
  let segments v = List.map segment (String.split_on_char '.' v) in
  let rec compare_lists xs ys =
    match xs, ys with
    | [], [] -> 0
    | [], _ :: _ -> -1
    | _ :: _, [] -> 1
    | `Int a :: xs', `Int b :: ys' ->
      (match Int.compare a b with 0 -> compare_lists xs' ys' | c -> c)
    | `Str a :: xs', `Str b :: ys' ->
      (match String.compare a b with 0 -> compare_lists xs' ys' | c -> c)
    | `Int _ :: _, `Str _ :: _ -> -1
    | `Str _ :: _, `Int _ :: _ -> 1
  in
  (* Arguments deliberately swapped: descending order. *)
  compare_lists (segments v2) (segments v1)

(** Versions available for [name] under <html_dir>/p/<name>, sorted
    descending via [compare_versions]; [] when the directory is missing. *)
let list_package_versions ~html_dir ~name =
  let pkg_dir = Filename.concat (Filename.concat html_dir "p") name in
  if Sys.file_exists pkg_dir && Sys.is_directory pkg_dir then
    let is_dir v = Sys.is_directory (Filename.concat pkg_dir v) in
    Sys.readdir pkg_dir
    |> Array.to_list
    |> List.filter is_dir
    |> List.sort compare_versions
  else
    []
(** All (name, version) pairs that have a docs directory. *)
let list_packages ~html_dir =
  List.concat_map
    (fun name ->
       List.map (fun version -> (name, version))
         (list_package_versions ~html_dir ~name))
    (list_package_names ~html_dir)
(** Whether a docs directory exists for [name]/[version]. *)
let package_has_docs ~html_dir ~name ~version =
  let path =
    Filename.concat html_dir
      (Filename.concat "p" (Filename.concat name version))
  in
  Sys.file_exists path && Sys.is_directory path

(** URL path under which the docs for [name]/[version] are served. *)
let docs_path ~name ~version = Printf.sprintf "/docs/p/%s/%s/" name version
+16
day10/web/data/package_data.mli
(** Read package data from day10's html directory *)

(** List all (name, version) pairs with docs *)
val list_packages : html_dir:string -> (string * string) list

(** List unique package names, sorted ascending *)
val list_package_names : html_dir:string -> string list

(** List all versions for a package name, sorted descending *)
val list_package_versions : html_dir:string -> name:string -> string list

(** Check if docs exist for a package version *)
val package_has_docs : html_dir:string -> name:string -> version:string -> bool

(** Get the docs URL path for a package *)
val docs_path : name:string -> version:string -> string
+60
day10/web/data/progress_data.ml
(** Read progress.json for dashboard display *)

(** Pipeline phase of a batch run. *)
type phase =
  | Solving
  | Blessings
  | Building
  | Gc
  | Completed

(** Parse the "phase" field of progress.json; "solving" and any
    unrecognized string both map to [Solving]. *)
let phase_of_string s =
  match s with
  | "blessings" -> Blessings
  | "building" -> Building
  | "gc" -> Gc
  | "completed" -> Completed
  | _ -> Solving

(** Human-readable label for a phase. *)
let phase_to_string p =
  match p with
  | Solving -> "Solving"
  | Blessings -> "Computing Blessings"
  | Building -> "Building"
  | Gc -> "Garbage Collection"
  | Completed -> "Completed"
(* Snapshot of one run's progress.json (see [read]). *)
type t = {
  run_id : string;
  start_time : string;     (* copied verbatim from progress.json *)
  phase : phase;
  targets : string list;   (* requested target packages *)
  solutions_found : int;
  solutions_failed : int;
  build_completed : int;
  build_total : int;
  doc_completed : int;
  doc_total : int;
}
(** Read and parse <log_dir>/runs/<run_id>/progress.json.
    Returns [None] when the file is absent, or when any expected field is
    missing or has the wrong type (all exceptions are swallowed). *)
let read ~log_dir ~run_id =
  let path = Filename.concat log_dir
      (Filename.concat "runs" (Filename.concat run_id "progress.json")) in
  if Sys.file_exists path then
    try
      let content = In_channel.with_open_text path In_channel.input_all in
      let json = Yojson.Safe.from_string content in
      let open Yojson.Safe.Util in
      Some {
        run_id = json |> member "run_id" |> to_string;
        start_time = json |> member "start_time" |> to_string;
        phase = json |> member "phase" |> to_string |> phase_of_string;
        targets = json |> member "targets" |> to_list |> List.map to_string;
        solutions_found = json |> member "solutions_found" |> to_int;
        solutions_failed = json |> member "solutions_failed" |> to_int;
        build_completed = json |> member "build_completed" |> to_int;
        build_total = json |> member "build_total" |> to_int;
        doc_completed = json |> member "doc_completed" |> to_int;
        doc_total = json |> member "doc_total" |> to_int;
      }
    with _ -> None
  else
    None
+140
day10/web/data/run_data.ml
(** Read run data from day10's log directory *)

(** Run IDs found under <log_dir>/runs, most recent (lexically greatest)
    first; [] when the directory is missing. *)
let list_runs ~log_dir =
  let runs_dir = Filename.concat log_dir "runs" in
  if not (Sys.file_exists runs_dir && Sys.is_directory runs_dir) then []
  else
    Sys.readdir runs_dir
    |> Array.to_list
    |> List.filter (fun name ->
        Sys.is_directory (Filename.concat runs_dir name))
    |> List.sort (fun a b -> String.compare b a) (* Descending *)
(** Run ID recorded by the 'latest' symlink (completed runs only).
    NOTE(review): [Sys.file_exists] follows symlinks, so a dangling
    'latest' link yields [None] rather than its basename — confirm this
    is intended. *)
let get_latest_run_id ~log_dir =
  let latest = Filename.concat log_dir "latest" in
  if Sys.file_exists latest then
    try
      let target = Unix.readlink latest in
      (* Target is like "runs/2026-02-04-120000" *)
      Some (Filename.basename target)
    with Unix.Unix_error _ -> None
  else
    None
(** Most recent run, including runs still in progress: head of
    [list_runs], which scans runs/ directly instead of trusting the
    'latest' symlink (that link is only written when a run completes). *)
let get_most_recent_run_id ~log_dir =
  match list_runs ~log_dir with
  | newest :: _ -> Some newest
  | [] -> None
(** Parse <log_dir>/runs/<run_id>/summary.json into a
    [Day10_lib.Run_log.summary]. Returns [None] when the file is absent
    or any field is missing/mistyped (all exceptions are swallowed). *)
let read_summary ~log_dir ~run_id =
  let path = Filename.concat log_dir
      (Filename.concat "runs" (Filename.concat run_id "summary.json")) in
  if Sys.file_exists path then
    try
      let content = In_channel.with_open_text path In_channel.input_all in
      let json = Yojson.Safe.from_string content in
      let open Yojson.Safe.Util in
      (* "failures" is a list of {package, error} objects. *)
      let failures =
        json |> member "failures" |> to_list
        |> List.map (fun f ->
            (f |> member "package" |> to_string,
             f |> member "error" |> to_string))
      in
      Some {
        Day10_lib.Run_log.run_id = json |> member "run_id" |> to_string;
        start_time = json |> member "start_time" |> to_string;
        end_time = json |> member "end_time" |> to_string;
        duration_seconds = json |> member "duration_seconds" |> to_float;
        targets_requested = json |> member "targets_requested" |> to_int;
        solutions_found = json |> member "solutions_found" |> to_int;
        build_success = json |> member "build_success" |> to_int;
        build_failed = json |> member "build_failed" |> to_int;
        doc_success = json |> member "doc_success" |> to_int;
        doc_failed = json |> member "doc_failed" |> to_int;
        doc_skipped = json |> member "doc_skipped" |> to_int;
        failures;
      }
    with _ -> None
  else
    None
(** Read an entire log file; [None] when missing or unreadable. *)
let read_log_file path =
  if not (Sys.file_exists path) then None
  else
    match In_channel.with_open_text path In_channel.input_all with
    | contents -> Some contents
    | exception _ -> None
(** Read the build log for [package] in run [run_id]. *)
let read_build_log ~log_dir ~run_id ~package =
  read_log_file
    (List.fold_left Filename.concat log_dir
       ["runs"; run_id; "build"; package ^ ".log"])

(** Read the doc log for [package] in run [run_id]. *)
let read_doc_log ~log_dir ~run_id ~package =
  read_log_file
    (List.fold_left Filename.concat log_dir
       ["runs"; run_id; "docs"; package ^ ".log"])
(** Package names (".log" suffix stripped) of log files in [dir], sorted
    ascending; [] when the directory is missing. *)
let list_logs_in_dir dir =
  if not (Sys.file_exists dir && Sys.is_directory dir) then []
  else
    Sys.readdir dir
    |> Array.to_list
    |> List.filter_map (fun name ->
        if Filename.check_suffix name ".log"
        then Some (Filename.chop_suffix name ".log")
        else None)
    |> List.sort String.compare

(** Packages with a build log in run [run_id]. *)
let list_build_logs ~log_dir ~run_id =
  list_logs_in_dir
    (List.fold_left Filename.concat log_dir ["runs"; run_id; "build"])

(** Packages with a doc log in run [run_id]. *)
let list_doc_logs ~log_dir ~run_id =
  list_logs_in_dir
    (List.fold_left Filename.concat log_dir ["runs"; run_id; "docs"])
(** Whether a build log exists for [package] in run [run_id]. *)
let has_build_log ~log_dir ~run_id ~package =
  Sys.file_exists
    (List.fold_left Filename.concat log_dir
       ["runs"; run_id; "build"; package ^ ".log"])

(** Whether a doc log exists for [package] in run [run_id]. *)
let has_doc_log ~log_dir ~run_id ~package =
  Sys.file_exists
    (List.fold_left Filename.concat log_dir
       ["runs"; run_id; "docs"; package ^ ".log"])
(** A run with no summary.json is treated as still in progress
    (the summary is only written when a run finishes). *)
let is_run_in_progress ~log_dir ~run_id =
  let summary =
    List.fold_left Filename.concat log_dir ["runs"; run_id; "summary.json"]
  in
  not (Sys.file_exists summary)
(** Status of [package] in run [run_id], derived from summary.json:
    - [`Failed err] when listed in the summary's failures,
    - [`Success] when not failed and a build or doc log exists,
    - [`Not_in_run] when the summary exists but no trace of the package,
    - [None] when there is no readable summary (run missing or in progress). *)
let get_package_status_from_summary ~log_dir ~run_id ~package =
  match read_summary ~log_dir ~run_id with
  | None -> None
  | Some summary ->
    (* Check if package is in the failures list *)
    match List.find_opt (fun (pkg, _) -> pkg = package) summary.failures with
    | Some (_, error) -> Some (`Failed error)
    | None ->
      (* Not in failures - check if logs exist to confirm it was processed *)
      if has_build_log ~log_dir ~run_id ~package ||
         has_doc_log ~log_dir ~run_id ~package then
        Some `Success
      else
        Some `Not_in_run
+40
day10/web/data/run_data.mli
(** Read run data from day10's log directory *)

(** List all run IDs, most recent first *)
val list_runs : log_dir:string -> string list

(** Get the latest run ID from the 'latest' symlink (completed runs only) *)
val get_latest_run_id : log_dir:string -> string option

(** Get the most recent run ID, including runs in progress.
    Scans the runs/ directory directly rather than relying on the 'latest' symlink. *)
val get_most_recent_run_id : log_dir:string -> string option

(** Read summary.json for a run *)
val read_summary : log_dir:string -> run_id:string -> Day10_lib.Run_log.summary option

(** Read a build log file *)
val read_build_log : log_dir:string -> run_id:string -> package:string -> string option

(** Read a doc log file *)
val read_doc_log : log_dir:string -> run_id:string -> package:string -> string option

(** List all build logs in a run *)
val list_build_logs : log_dir:string -> run_id:string -> string list

(** List all doc logs in a run *)
val list_doc_logs : log_dir:string -> run_id:string -> string list

(** Check if build log exists for a package *)
val has_build_log : log_dir:string -> run_id:string -> package:string -> bool

(** Check if doc log exists for a package *)
val has_doc_log : log_dir:string -> run_id:string -> package:string -> bool

(** Check if a run is still in progress (no summary.json yet) *)
val is_run_in_progress : log_dir:string -> run_id:string -> bool

(** Get package status from summary failures list *)
val get_package_status_from_summary :
  log_dir:string -> run_id:string -> package:string ->
  [`Success | `Failed of string | `Not_in_run] option
(** Live log viewer for in-progress builds *)

(** Split a lock-file name into (stage, "package.version"), stripping a
    trailing "-<32 chars>" universe hash when present.
    E.g., "build-confero.0.1.1-6b695ab..." -> Some ("build", "confero.0.1.1")
    and "doc-cmdliner.2.1.0" -> Some ("doc", "cmdliner.2.1.0").
    Names without a "build-"/"doc-" prefix yield [None]. *)
let parse_lock_file_name lock_file =
  let strip_universe rest =
    (* A universe hash is exactly 32 chars after the final dash. *)
    match String.rindex_opt rest '-' with
    | Some i when String.length rest - i - 1 = 32 -> String.sub rest 0 i
    | _ -> rest
  in
  let after prefix =
    let plen = String.length prefix in
    if String.length lock_file > plen && String.sub lock_file 0 plen = prefix
    then Some (String.sub lock_file plen (String.length lock_file - plen))
    else None
  in
  match after "build-" with
  | Some rest -> Some ("build", strip_universe rest)
  | None ->
    match after "doc-" with
    | Some rest -> Some ("doc", strip_universe rest)
    | None -> None
(** Try to find a completed layer log by looking in packages/{pkg_str}/ for
    layer symlinks. Picks the lexically greatest "<stage>-*" entry; reads
    "odoc-voodoo-all.log" for docs, "build.log" otherwise.
    Returns [Some (log_path, contents)] or [None] on any failure. *)
let find_completed_layer_log ~cache_dir ~platform ~stage ~pkg_str =
  let pkg_dir = Filename.concat (Filename.concat (Filename.concat cache_dir platform) "packages") pkg_str in
  if not (Sys.file_exists pkg_dir) then None
  else
    try
      (* Look for build-* or doc-* symlinks in the package directory *)
      let prefix = stage ^ "-" in
      let candidates = Sys.readdir pkg_dir
        |> Array.to_list
        |> List.filter (fun name -> String.length name > String.length prefix && String.sub name 0 (String.length prefix) = prefix)
        |> List.sort (fun a b -> String.compare b a) (* Most recent first by name *)
      in
      match candidates with
      | layer_name :: _ ->
        let log_file = if stage = "doc" then "odoc-voodoo-all.log" else "build.log" in
        (* The symlink name matches the real layer dir at the platform root. *)
        let layer_dir = Filename.concat (Filename.concat cache_dir platform) layer_name in
        let log_path = Filename.concat layer_dir log_file in
        if Sys.file_exists log_path then
          Some (log_path, In_channel.with_open_text log_path In_channel.input_all)
        else
          None
      | [] -> None
    with _ -> None
(** Get the log content for a lock file.
    Returns [(log_path_opt, content)]; [log_path_opt] is [None] when only
    a diagnostic message could be produced.
    Lock file layout as read here: line 1 pid, line 2 start time,
    line 3 layer name, line 4 temp log path (later lines ignored). *)
let get_log_content ~cache_dir ~platform ~lock_file =
  let locks_dir = Filename.concat cache_dir "locks" in
  let lock_path = Filename.concat locks_dir (lock_file ^ ".lock") in
  if Sys.file_exists lock_path then
    try
      let content = In_channel.with_open_text lock_path In_channel.input_all in
      let lines = String.split_on_char '\n' content in
      match lines with
      | _pid :: _time :: _layer_name :: temp_log_path :: _ when String.trim temp_log_path <> "" ->
        (* In-flight build: stream the temp log if it exists yet. *)
        let log_path = String.trim temp_log_path in
        if Sys.file_exists log_path then
          Some log_path, In_channel.with_open_text log_path In_channel.input_all
        else
          None, Printf.sprintf "Log file not yet created: %s" log_path
      | _pid :: _time :: layer_name :: _ when String.trim layer_name <> "" ->
        (* Try completed layer log *)
        let layer = String.trim layer_name in
        (* Doc layers log to odoc-voodoo-all.log; everything else to build.log. *)
        let log_file = if String.length layer > 4 && String.sub layer 0 4 = "doc-" then
            "odoc-voodoo-all.log"
          else
            "build.log"
        in
        let layer_log = Filename.concat (Filename.concat (Filename.concat cache_dir platform) layer) log_file in
        if Sys.file_exists layer_log then
          Some layer_log, In_channel.with_open_text layer_log In_channel.input_all
        else
          None, Printf.sprintf "No log file found (layer: %s)" layer
      | _ ->
        None, "Lock file format not recognized"
    with exn ->
      None, Printf.sprintf "Error reading lock file: %s" (Printexc.to_string exn)
  else
    (* Lock file doesn't exist - try to find completed layer by package name *)
    match parse_lock_file_name lock_file with
    | Some (stage, pkg_str) ->
      (match find_completed_layer_log ~cache_dir ~platform ~stage ~pkg_str with
       | Some (log_path, content) ->
         Some log_path, content
       | None ->
         None, Printf.sprintf "Build completed but log not found for %s" pkg_str)
    | None ->
      None, "Lock file not found and could not parse package name"
(** Render just the log content (for AJAX refresh).

    Fix: the replacement strings must be the HTML entities themselves —
    as written, splitting on '&' and re-joining with "&" (and likewise for
    '<'/'>') was a no-op, leaving log text unescaped inside the <pre>
    (broken markup / HTML injection from log contents). '&' is escaped
    first so the entities produced for '<' and '>' are not double-escaped. *)
let content_only ~cache_dir ~platform ~lock_file =
  let _log_path, content = get_log_content ~cache_dir ~platform ~lock_file in
  (* Escape HTML and preserve formatting *)
  let escaped = content
    |> String.split_on_char '&' |> String.concat "&amp;"
    |> String.split_on_char '<' |> String.concat "&lt;"
    |> String.split_on_char '>' |> String.concat "&gt;"
  in
  Printf.sprintf "<pre class=\"log-content\">%s</pre>" escaped

(** Parse lock filename for display: strip a known stage prefix off the
    lock-file name and return [(stage_label, remainder)].  Unrecognized
    names map to [("Unknown", name)]. *)
let parse_lock_name lock_file =
  (* [Some rest] when [s] starts with [prefix] and is strictly longer than
     it (so a bare prefix like "build-" does not match, as before). *)
  let strip prefix s =
    let plen = String.length prefix in
    if String.length s > plen && String.sub s 0 plen = prefix then
      Some (String.sub s plen (String.length s - plen))
    else
      None
  in
  match strip "build-" lock_file with
  | Some rest -> "Build", rest
  | None ->
    match strip "doc-" lock_file with
    | Some rest -> "Doc", rest
    | None ->
      match strip "tool-" lock_file with
      | Some rest -> "Tool", rest
      | None -> "Unknown", lock_file

(** Check if the lock is still active: a stage is considered in progress
    while [<cache_dir>/locks/<lock_file>.lock] exists on disk. *)
let is_lock_active ~cache_dir ~lock_file =
  Sys.file_exists
    (Filename.concat (Filename.concat cache_dir "locks") (lock_file ^ ".lock"))
125125+126126+(** Render full live log page with auto-refresh *)
127127+let render ~cache_dir ~platform ~lock_file =
128128+ let stage, name = parse_lock_name lock_file in
129129+ let log_path_opt, content = get_log_content ~cache_dir ~platform ~lock_file in
130130+ let is_active = is_lock_active ~cache_dir ~lock_file in
131131+ let log_path_display = match log_path_opt with
132132+ | Some p -> p
133133+ | None -> "(not available)"
134134+ in
135135+ let escaped_content = content
136136+ |> String.split_on_char '&' |> String.concat "&"
137137+ |> String.split_on_char '<' |> String.concat "<"
138138+ |> String.split_on_char '>' |> String.concat ">"
139139+ in
140140+ let status_indicator = if is_active then
141141+ {|<span class="led led-active" style="margin-right: 0.5rem;"></span>|}
142142+ else
143143+ {|<span style="color: var(--success); margin-right: 0.5rem;">✓</span>|}
144144+ in
145145+ let status_label = if is_active then "In Progress" else "Completed" in
146146+ Layout.page ~title:(Printf.sprintf "%s Log: %s %s" status_label stage name) ~content:(Printf.sprintf {|
147147+ <div class="container">
148148+ <nav style="margin-bottom: 1rem;">
149149+ <a href="/">← Dashboard</a>
150150+ </nav>
151151+152152+ <div class="card">
153153+ <h1 style="margin-top: 0;">
154154+ %s
155155+ %s: %s
156156+ </h1>
157157+ <p style="color: var(--text-muted); font-size: 0.9em;">
158158+ Log file: <code>%s</code>
159159+ </p>
160160+161161+ <div style="margin: 1rem 0;">
162162+ <label style="display: flex; align-items: center; gap: 0.5rem;">
163163+ <input type="checkbox" id="auto-refresh" %s>
164164+ Auto-refresh (every 2s)
165165+ </label>
166166+ <label style="display: flex; align-items: center; gap: 0.5rem; margin-top: 0.5rem;">
167167+ <input type="checkbox" id="auto-scroll" checked>
168168+ Auto-scroll to bottom
169169+ </label>
170170+ </div>
171171+ </div>
172172+173173+ <div class="card" style="margin-top: 1rem;">
174174+ <div id="log-container" style="max-height: 70vh; overflow-y: auto; background: var(--bg-tertiary); padding: 1rem; border-radius: 4px;">
175175+ <pre class="log-content" style="margin: 0; white-space: pre-wrap; word-wrap: break-word; font-size: 0.85em;">%s</pre>
176176+ </div>
177177+ </div>
178178+ </div>
179179+180180+ <script>
181181+ (function() {
182182+ const container = document.getElementById('log-container');
183183+ const autoRefreshCheckbox = document.getElementById('auto-refresh');
184184+ const autoScrollCheckbox = document.getElementById('auto-scroll');
185185+ let refreshInterval = null;
186186+187187+ function scrollToBottom() {
188188+ if (autoScrollCheckbox.checked) {
189189+ container.scrollTop = container.scrollHeight;
190190+ }
191191+ }
192192+193193+ function refreshLog() {
194194+ fetch('/live/%s/content')
195195+ .then(response => response.text())
196196+ .then(html => {
197197+ container.innerHTML = html;
198198+ scrollToBottom();
199199+ })
200200+ .catch(err => console.error('Refresh failed:', err));
201201+ }
202202+203203+ function startRefresh() {
204204+ if (refreshInterval) clearInterval(refreshInterval);
205205+ refreshInterval = setInterval(refreshLog, 2000);
206206+ }
207207+208208+ function stopRefresh() {
209209+ if (refreshInterval) {
210210+ clearInterval(refreshInterval);
211211+ refreshInterval = null;
212212+ }
213213+ }
214214+215215+ autoRefreshCheckbox.addEventListener('change', function() {
216216+ if (this.checked) {
217217+ startRefresh();
218218+ } else {
219219+ stopRefresh();
220220+ }
221221+ });
222222+223223+ // Start auto-refresh only if checkbox is checked, and scroll to bottom
224224+ if (autoRefreshCheckbox.checked) {
225225+ startRefresh();
226226+ }
227227+ scrollToBottom();
228228+ })();
229229+ </script>
230230+ |} status_indicator stage name (if is_active then "checked" else "") log_path_display escaped_content lock_file)
+309
day10/web/views/packages.ml
···11+(** Package list and detail pages *)

(* Searchable index of every documented package, with links to the detail
   page and the rendered odoc output. *)
let list_page ~html_dir =
  let packages = Day10_web_data.Package_data.list_packages ~html_dir in
  (* One table row per (name, version) pair. *)
  let row_for (name, version) =
    Printf.sprintf {|
      <tr>
        <td><a href="/packages/%s/%s">%s</a></td>
        <td>%s</td>
        <td>%s</td>
        <td><a href="/docs/p/%s/%s/doc/index.html">View Docs</a></td>
      </tr>
    |} name version name version (Layout.badge `Success) name version
  in
  let rows = String.concat "\n" (List.map row_for packages) in
  let content = Printf.sprintf {|
    <h1>Packages</h1>
    <div class="card">
      <input type="search" id="pkg-search" placeholder="Search packages..." onkeyup="filterTable()">
      <table id="pkg-table">
        <thead>
          <tr>
            <th>Package</th>
            <th>Version</th>
            <th>Docs Status</th>
            <th>Links</th>
          </tr>
        </thead>
        <tbody>
          %s
        </tbody>
      </table>
    </div>
    <script>
      function filterTable() {
        const filter = document.getElementById('pkg-search').value.toLowerCase();
        const rows = document.querySelectorAll('#pkg-table tbody tr');
        rows.forEach(row => {
          const text = row.textContent.toLowerCase();
          row.style.display = text.includes(filter) ? '' : 'none';
        });
      }
    </script>
  |} rows
  in
  Layout.page ~title:"Packages" ~content

(** Render the detail page for one package version: documentation link,
    build status and log links, dependencies, reverse dependencies, and the
    list of other documented versions.  [package] is "<name>.<version>". *)
let detail_page ~html_dir ~cache_dir ~platform ~log_dir ~name ~version =
  let package = name ^ "." ^ version in
  (* Without generated docs there is nothing to show — render a not-found page. *)
  if not (Day10_web_data.Package_data.package_has_docs ~html_dir ~name ~version) then
    Layout.page ~title:"Package Not Found" ~content:(Printf.sprintf {|
      <h1>Package Not Found</h1>
      <p class="card">No documentation found for %s</p>
      <p><a href="/packages">← Back to packages</a></p>
    |} package)
  else
    (* Every documented version of this package; current one bold, others link. *)
    let all_versions = Day10_web_data.Package_data.list_package_versions ~html_dir ~name in
    let versions_list = all_versions |> List.map (fun v ->
      if v = version then
        Printf.sprintf "<li><strong>%s</strong> (current)</li>" v
      else
        Printf.sprintf {|<li><a href="/packages/%s/%s">%s</a></li>|} name v v
    ) |> String.concat "\n" in

    (* Get layer info for dependencies and build timestamp *)
    let layer_info = Day10_web_data.Layer_data.get_package_layer
      ~cache_dir ~platform ~package in

    (* Get latest run ID for log links *)
    let latest_run = Day10_web_data.Run_data.get_latest_run_id ~log_dir in

    (* Determine build status from multiple sources: prefer the cached layer
       (it carries an exit status and creation time); otherwise fall back to
       the latest run's logs and summary. *)
    let build_status, build_time = match layer_info with
      | Some info ->
        (* info.created is treated as seconds since epoch; rendered as UTC.
           NOTE(review): assumes Unix-epoch seconds — confirm in Layer_data. *)
        let timestamp = Unix.gmtime info.created in
        let time_str = Printf.sprintf "%04d-%02d-%02d %02d:%02d:%02d UTC"
          (timestamp.Unix.tm_year + 1900) (timestamp.Unix.tm_mon + 1)
          timestamp.Unix.tm_mday timestamp.Unix.tm_hour
          timestamp.Unix.tm_min timestamp.Unix.tm_sec in
        let status = if info.exit_status = 0 then `Success else `Failed in
        (status, Some time_str)
      | None ->
        (* No layer info - check logs and summary *)
        match latest_run with
        | Some run_id ->
          (* Check if run is still in progress *)
          if Day10_web_data.Run_data.is_run_in_progress ~log_dir ~run_id then
            (* Check if we have logs for this package *)
            if Day10_web_data.Run_data.has_build_log ~log_dir ~run_id ~package then
              (`In_progress, None)
            else
              (`Pending, None)
          else
            (* Run finished - check summary for status *)
            begin match Day10_web_data.Run_data.get_package_status_from_summary
              ~log_dir ~run_id ~package with
            | Some `Success -> (`Success, None)
            | Some (`Failed _) -> (`Failed, None)
            | Some `Not_in_run -> (`Unknown, None)
            | None -> (`Unknown, None)
            end
        | None -> (`Unknown, None)
    in

    (* Build info section *)
    let status_badge = match build_status with
      | `Success -> Layout.badge `Success
      | `Failed -> Layout.badge `Failed
      | `In_progress ->
        {|<span class="badge badge-warning" style="animation: pulse-glow 1s ease-in-out infinite;">building</span>|}
      | `Pending ->
        {|<span class="badge badge-warning">pending</span>|}
      | `Unknown -> Layout.badge `Skipped
    in

    let build_status_content =
      (* Timestamp line is only present when layer_info supplied one. *)
      let time_line = match build_time with
        | Some t -> Printf.sprintf {|<p><strong>Built:</strong> %s</p>|} t
        | None -> ""
      in
      Printf.sprintf {|
        <p><strong>Status:</strong> %s</p>
        %s
      |} status_badge time_line
    in

    (* Log links - show if logs exist *)
    let log_links = match latest_run with
      | Some run_id ->
        let has_build = Day10_web_data.Run_data.has_build_log ~log_dir ~run_id ~package in
        let has_docs = Day10_web_data.Run_data.has_doc_log ~log_dir ~run_id ~package in
        if has_build || has_docs then
          let build_link = if has_build then
            Printf.sprintf {|<a href="/runs/%s/build/%s">Build Log →</a>|} run_id package
          else
            {|<span style="color: var(--text-dim);">No build log</span>|}
          in
          let doc_link = if has_docs then
            Printf.sprintf {|<a href="/runs/%s/docs/%s">Doc Log →</a>|} run_id package
          else
            {|<span style="color: var(--text-dim);">No doc log</span>|}
          in
          Printf.sprintf {|
            <p style="margin-top: 1rem;">
              %s
              <span style="margin: 0 0.5rem; color: var(--text-dim);">|</span>
              %s
            </p>
          |} build_link doc_link
        else
          {|<p style="margin-top: 1rem; color: var(--text-dim);">No logs in latest run.</p>|}
      | None ->
        {|<p style="margin-top: 1rem; color: var(--text-dim);">No runs recorded yet.</p>|}
    in

    let build_info = Printf.sprintf {|
      <div class="card">
        <h2>Build & Logs</h2>
        %s
        %s
      </div>
    |} build_status_content log_links
    in

    (* Parse "name.version" format - version starts at first .digit or .v followed by digit.
       Returns None when no version boundary is found (dep rendered unlinked). *)
    let parse_package_str s =
      let len = String.length s in
      let rec find_version_start i =
        if i >= len - 1 then None
        else if s.[i] = '.' then
          let next = s.[i + 1] in
          if next >= '0' && next <= '9' then Some i
          else if next = 'v' && i + 2 < len && s.[i + 2] >= '0' && s.[i + 2] <= '9' then Some i
          else find_version_start (i + 1)
        else find_version_start (i + 1)
      in
      match find_version_start 0 with
      | Some i -> Some (String.sub s 0 i, String.sub s (i + 1) (len - i - 1))
      | None -> None
    in

    (* Dependencies section - always show *)
    let deps_section =
      let deps_content = match layer_info with
        | Some info when info.deps <> [] ->
          let deps_list = info.deps
            |> List.map (fun dep ->
              match parse_package_str dep with
              | Some (dep_name, dep_version) ->
                Printf.sprintf {|<li><a href="/packages/%s/%s">%s</a></li>|} dep_name dep_version dep
              | None ->
                Printf.sprintf "<li>%s</li>" dep)
            |> String.concat "\n" in
          Printf.sprintf {|<ul>%s</ul>|} deps_list
        | Some _ ->
          {|<p style="color: var(--text-dim);">No dependencies.</p>|}
        | None ->
          {|<p style="color: var(--text-dim);">Dependency information not available.</p>|}
      in
      let deps_count = match layer_info with
        | Some info -> List.length info.deps
        | None -> 0
      in
      Printf.sprintf {|
        <div class="card">
          <h2>Dependencies (%d)</h2>
          %s
        </div>
      |} deps_count deps_content
    in

    (* Reverse dependencies section - always show *)
    let reverse_deps = Day10_web_data.Layer_data.get_reverse_deps
      ~cache_dir ~platform ~package in
    let rev_deps_section =
      let rev_deps_content = if reverse_deps <> [] then
        let rev_deps_list = reverse_deps
          |> List.map (fun dep ->
            match parse_package_str dep with
            | Some (dep_name, dep_version) ->
              Printf.sprintf {|<li><a href="/packages/%s/%s">%s</a></li>|} dep_name dep_version dep
            | None ->
              Printf.sprintf "<li>%s</li>" dep)
          |> String.concat "\n" in
        Printf.sprintf {|<ul>%s</ul>|} rev_deps_list
      else
        {|<p style="color: var(--text-dim);">No packages depend on this one.</p>|}
      in
      Printf.sprintf {|
        <div class="card">
          <h2>Reverse Dependencies (%d)</h2>
          %s
        </div>
      |} (List.length reverse_deps) rev_deps_content
    in

    (* Assemble the page: docs card, build/logs card, deps, reverse deps,
       other versions. *)
    let content = Printf.sprintf {|
      <h1>%s</h1>
      <p><a href="/packages">← Back to packages</a></p>

      <div class="card">
        <h2>Documentation</h2>
        <p>%s</p>
        <p><a href="/docs/p/%s/%s/doc/index.html">View Documentation →</a></p>
      </div>

      %s
      %s
      %s

      <div class="card">
        <h2>Other Versions</h2>
        <ul>%s</ul>
      </div>
    |} package (Layout.badge `Success) name version build_info deps_section rev_deps_section versions_list
    in
    Layout.page ~title:package ~content
258258+259259+(** Combined build and doc logs page for a package *)
260260+let logs_page ~log_dir ~name ~version =
261261+ let package = name ^ "." ^ version in
262262+ let latest_run = Day10_web_data.Run_data.get_latest_run_id ~log_dir in
263263+ match latest_run with
264264+ | None ->
265265+ Layout.page ~title:(package ^ " Logs") ~content:(Printf.sprintf {|
266266+ <h1>%s Logs</h1>
267267+ <p><a href="/packages/%s/%s">← Back to package</a></p>
268268+ <div class="card">
269269+ <p>No run data available.</p>
270270+ </div>
271271+ |} package name version)
272272+ | Some run_id ->
273273+ let build_log = Day10_web_data.Run_data.read_build_log ~log_dir ~run_id ~package in
274274+ let doc_log = Day10_web_data.Run_data.read_doc_log ~log_dir ~run_id ~package in
275275+276276+ let build_section = match build_log with
277277+ | Some log ->
278278+ Printf.sprintf {|
279279+ <div class="card">
280280+ <h2>Build Log</h2>
281281+ <p><em>From run %s</em></p>
282282+ <pre>%s</pre>
283283+ </div>
284284+ |} run_id log
285285+ | None ->
286286+ {|<div class="card"><h2>Build Log</h2><p>No build log available for this package.</p></div>|}
287287+ in
288288+289289+ let doc_section = match doc_log with
290290+ | Some log ->
291291+ Printf.sprintf {|
292292+ <div class="card">
293293+ <h2>Documentation Log</h2>
294294+ <p><em>From run %s</em></p>
295295+ <pre>%s</pre>
296296+ </div>
297297+ |} run_id log
298298+ | None ->
299299+ {|<div class="card"><h2>Documentation Log</h2><p>No doc log available for this package.</p></div>|}
300300+ in
301301+302302+ let content = Printf.sprintf {|
303303+ <h1>%s Logs</h1>
304304+ <p><a href="/packages/%s/%s">← Back to package</a></p>
305305+ %s
306306+ %s
307307+ |} package name version build_section doc_section
308308+ in
309309+ Layout.page ~title:(package ^ " Logs") ~content
+158
day10/web/views/runs.ml
···11+(** Run history and detail pages *)

(* Run-history index: one table row per recorded run, with counts drawn
   from each run's summary file. *)
let list_page ~log_dir =
  let runs = Day10_web_data.Run_data.list_runs ~log_dir in
  (* Falls back to a stub row when the summary cannot be read. *)
  let row_for run_id =
    match Day10_web_data.Run_data.read_summary ~log_dir ~run_id with
    | None ->
      Printf.sprintf {|<tr><td><a href="/runs/%s">%s</a></td><td colspan="5">Summary not available</td></tr>|} run_id run_id
    | Some s ->
      Printf.sprintf {|
        <tr>
          <td><a href="/runs/%s">%s</a></td>
          <td>%s</td>
          <td>%.0fs</td>
          <td>%d %s</td>
          <td>%d %s</td>
          <td>%d %s</td>
        </tr>
      |} run_id run_id
        s.start_time
        s.duration_seconds
        s.build_success (if s.build_success > 0 then Layout.badge `Success else "")
        s.build_failed (if s.build_failed > 0 then Layout.badge `Failed else "")
        s.doc_success (if s.doc_success > 0 then Layout.badge `Success else "")
  in
  let rows = String.concat "\n" (List.map row_for runs) in
  let content =
    if runs = [] then
      {|<h1>Run History</h1><p class="card">No runs recorded yet.</p>|}
    else
      Printf.sprintf {|
        <h1>Run History</h1>
        <div class="card">
          <table>
            <tr>
              <th>Run ID</th>
              <th>Started</th>
              <th>Duration</th>
              <th>Builds</th>
              <th>Failed</th>
              <th>Docs</th>
            </tr>
            %s
          </table>
        </div>
      |} rows
  in
  Layout.page ~title:"Run History" ~content

(** Render the detail page for one run: summary timings, per-category
    result counters, the failure table (with log links), and the list of
    build logs captured during the run. *)
let detail_page ~log_dir ~run_id =
  match Day10_web_data.Run_data.read_summary ~log_dir ~run_id with
  | None ->
    (* Unknown or garbage-collected run id. *)
    Layout.page ~title:"Run Not Found" ~content:{|
      <h1>Run Not Found</h1>
      <p class="card">The requested run could not be found.</p>
      <p><a href="/runs">← Back to run history</a></p>
    |}
  | Some s ->
    (* Failure table is omitted entirely when the run had no failures. *)
    let failures_table = if List.length s.failures > 0 then
      Printf.sprintf {|
        <h2>Failures (%d)</h2>
        <div class="card">
          <table>
            <tr><th>Package</th><th>Error</th><th>Logs</th></tr>
            %s
          </table>
        </div>
      |} (List.length s.failures)
        (s.failures |> List.map (fun (pkg, err) ->
          Printf.sprintf {|<tr>
            <td>%s</td>
            <td>%s</td>
            <td>
              <a href="/runs/%s/build/%s">build</a> |
              <a href="/runs/%s/docs/%s">docs</a>
            </td>
          </tr>|} pkg err run_id pkg run_id pkg
        ) |> String.concat "\n")
    else ""
    in

    (* Logs section lists every package with a captured build log. *)
    let build_logs = Day10_web_data.Run_data.list_build_logs ~log_dir ~run_id in
    let logs_section = if List.length build_logs > 0 then
      Printf.sprintf {|
        <h2>Build Logs (%d)</h2>
        <div class="card">
          <ul>%s</ul>
        </div>
      |} (List.length build_logs)
        (build_logs |> List.map (fun pkg ->
          Printf.sprintf {|<li><a href="/runs/%s/build/%s">%s</a></li>|} run_id pkg pkg
        ) |> String.concat "\n")
    else ""
    in

    (* Page layout: summary card, 7 stat tiles, then failures and logs. *)
    let content = Printf.sprintf {|
      <h1>Run %s</h1>
      <p><a href="/runs">← Back to run history</a></p>

      <div class="card">
        <h2>Summary</h2>
        <table>
          <tr><td>Started</td><td>%s</td></tr>
          <tr><td>Ended</td><td>%s</td></tr>
          <tr><td>Duration</td><td>%.0f seconds</td></tr>
        </table>
      </div>

      <div class="card">
        <h2>Results</h2>
        <div class="grid">
          %s %s %s %s %s %s %s
        </div>
      </div>

      %s
      %s
    |}
      run_id
      s.start_time s.end_time s.duration_seconds
      (Layout.stat ~value:(string_of_int s.targets_requested) ~label:"Targets")
      (Layout.stat ~value:(string_of_int s.solutions_found) ~label:"Solved")
      (Layout.stat ~value:(string_of_int s.build_success) ~label:"Build OK")
      (Layout.stat ~value:(string_of_int s.build_failed) ~label:"Build Failed")
      (Layout.stat ~value:(string_of_int s.doc_success) ~label:"Docs OK")
      (Layout.stat ~value:(string_of_int s.doc_failed) ~label:"Docs Failed")
      (Layout.stat ~value:(string_of_int s.doc_skipped) ~label:"Docs Skipped")
      failures_table
      logs_section
    in
    Layout.page ~title:(Printf.sprintf "Run %s" run_id) ~content
132132+133133+let log_page ~log_dir ~run_id ~log_type ~package =
134134+ let content_opt = match log_type with
135135+ | `Build -> Day10_web_data.Run_data.read_build_log ~log_dir ~run_id ~package
136136+ | `Docs -> Day10_web_data.Run_data.read_doc_log ~log_dir ~run_id ~package
137137+ in
138138+ let type_str = match log_type with `Build -> "Build" | `Docs -> "Doc" in
139139+ match content_opt with
140140+ | None ->
141141+ Layout.page ~title:"Log Not Found" ~content:(Printf.sprintf {|
142142+ <h1>Log Not Found</h1>
143143+ <p class="card">The requested log could not be found. It may have been garbage collected.</p>
144144+ <p><a href="/runs/%s">← Back to run %s</a></p>
145145+ |} run_id run_id)
146146+ | Some content ->
147147+ let escaped = content
148148+ |> String.split_on_char '&' |> String.concat "&"
149149+ |> String.split_on_char '<' |> String.concat "<"
150150+ |> String.split_on_char '>' |> String.concat ">"
151151+ in
152152+ Layout.page ~title:(Printf.sprintf "%s Log: %s" type_str package) ~content:(Printf.sprintf {|
153153+ <h1>%s Log: %s</h1>
154154+ <p><a href="/runs/%s">← Back to run %s</a></p>
155155+ <div class="card">
156156+ <pre>%s</pre>
157157+ </div>
158158+ |} type_str package run_id run_id escaped)