# [DEPRECATED] Go implementation of plcbundle — NOTE(review): this header appears misplaced; the file below is a Bash HTTP benchmark script, not Go. Kept as a comment so it no longer breaks parsing before the shebang.
#!/usr/bin/env bash
# HTTP endpoint benchmark for a DID resolver: extracts DIDs from local
# .jsonl.zst bundles, fires N requests at the server, and reports status
# codes, latency percentiles, and throughput.
set -euo pipefail

# ANSI color constants. These are literal backslash sequences; the script
# relies on `echo -e` / printf '%b' to interpret them at print time.
# Marked readonly since they are never reassigned.
readonly RED='\033[0;31m'
readonly GREEN='\033[0;32m'
readonly YELLOW='\033[1;33m'
readonly BLUE='\033[0;34m'
readonly CYAN='\033[0;36m'
readonly NC='\033[0m'

# Default configuration. Every knob can be overridden from the environment
# or by the command-line flags parsed below.
ENDPOINT="${ENDPOINT:-http://localhost:8080}"
NUM_REQUESTS="${NUM_REQUESTS:-100}"
CONCURRENCY="${CONCURRENCY:-4}"
BUNDLE_DIR="${BUNDLE_DIR:-.}"
ROUTE="${ROUTE:-document}"
WARMUP="${WARMUP:-0}"
OUTPUT_CSV="${OUTPUT_CSV:-}"
VERBOSE="${VERBOSE:-0}"
21
# Parse arguments
# Flags that take a value consume two positional words (hence `shift 2`);
# boolean flags consume one. Unknown flags abort with exit 1.
while [[ $# -gt 0 ]]; do
    case $1 in
        -e|--endpoint) ENDPOINT="$2"; shift 2 ;;
        -n|--requests) NUM_REQUESTS="$2"; shift 2 ;;
        -c|--concurrency) CONCURRENCY="$2"; shift 2 ;;
        -d|--dir) BUNDLE_DIR="$2"; shift 2 ;;
        -r|--route) ROUTE="$2"; shift 2 ;;
        -w|--warmup) WARMUP="$2"; shift 2 ;;
        -o|--output) OUTPUT_CSV="$2"; shift 2 ;;
        -v|--verbose) VERBOSE=1; shift ;;
        -h|--help)
            # Unquoted EOF delimiter: $0 is expanded inside the usage text.
            cat <<EOF
Usage: $0 [options]

Options:
  -e, --endpoint URL   HTTP endpoint (default: http://localhost:8080)
  -n, --requests N     Number of requests (default: 100)
  -c, --concurrency N  Concurrent requests (default: 4)
  -d, --dir PATH       Bundle directory (default: .)
  -r, --route TYPE     Route: document|data|audit (default: document)
  -w, --warmup N       Warmup requests (default: 0)
  -o, --output FILE    Save CSV results
  -v, --verbose        Verbose output

Examples:
  $0 -n 100 -c 4
  $0 -n 1000 -c 10 -w 50 -v -o results.csv
EOF
            exit 0
            ;;
        *) echo -e "${RED}Unknown option: $1${NC}"; exit 1 ;;
    esac
done
56
# Validate dependencies
# Abort early with an install hint if any required external tool is missing.
for cmd in curl jq zstd; do
    # Quote "$cmd" and use portable redirection instead of the bash-only `&>`.
    if ! command -v "$cmd" >/dev/null 2>&1; then
        echo -e "${RED}Error: '$cmd' not found. Install with: brew install $cmd${NC}"
        exit 1
    fi
done
64
# Print the banner and the effective run configuration before starting.
printf '%b\n' "${BLUE}════════════════════════════════════════════════════════════════${NC}"
printf '%b\n' "${BLUE} DID Resolver HTTP Endpoint Benchmark${NC}"
printf '%b\n' "${BLUE}════════════════════════════════════════════════════════════════${NC}"
printf '\n'
printf '%s\n' "Configuration:"
printf '%s\n' " Endpoint: $ENDPOINT"
printf '%s\n' " Route: /$ROUTE"
printf '%s\n' " Requests: $NUM_REQUESTS"
printf '%s\n' " Concurrency: $CONCURRENCY"
# The warmup line is only shown when a warmup phase was requested.
if (( WARMUP > 0 )); then
    printf '%s\n' " Warmup: $WARMUP"
fi
printf '%s\n' " Bundle Dir: $BUNDLE_DIR"
printf '\n'
79
# Setup temp
# Working directory for all intermediate files; removed on every exit path.
TEMP_DIR=$(mktemp -d)
# Single-quote the trap so "$TEMP_DIR" is expanded when the trap FIRES, not
# when it is installed; `--` guards against paths beginning with a dash.
trap 'rm -rf -- "$TEMP_DIR"' EXIT

DID_LIST="$TEMP_DIR/dids.txt"
RESULTS_FILE="$TEMP_DIR/results.csv"
86
# Phase 1: Extract DIDs
echo -e "${CYAN}━━━ Phase 1: Extracting DIDs ━━━${NC}"
echo ""

# Find bundles. Read line-by-line into the array so paths containing spaces
# survive intact (the while loop runs in the current shell thanks to the
# process substitution, so BUNDLES persists).
BUNDLES=()
while IFS= read -r line; do
    BUNDLES+=("$line")
done < <(find "$BUNDLE_DIR" -name "*.jsonl.zst" -type f | sort)

BUNDLE_COUNT=${#BUNDLES[@]}

if [[ $BUNDLE_COUNT -eq 0 ]]; then
    echo -e "${RED}Error: No bundles found in $BUNDLE_DIR${NC}"
    exit 1
fi

echo " Found $BUNDLE_COUNT bundles"

# Sample at most 10 bundles to keep extraction time bounded.
SAMPLE_SIZE=10
if [[ $BUNDLE_COUNT -lt $SAMPLE_SIZE ]]; then
    SAMPLE_SIZE=$BUNDLE_COUNT
fi

echo " Extracting DIDs from $SAMPLE_SIZE bundles..."
echo ""

# Extract DIDs in the current shell (not a piped subshell) so appends to
# $DID_LIST accumulate across iterations.
> "$DID_LIST" # Create empty file

# C-style loop instead of `for i in $(seq ...)`: no extra process, no
# word-splitting surprises.
for ((i = 0; i < SAMPLE_SIZE; i++)); do
    BUNDLE="${BUNDLES[$i]}"
    BUNDLE_NUM=$(basename "$BUNDLE" .jsonl.zst)

    echo -ne "\r Bundle $BUNDLE_NUM ($((i + 1))/$SAMPLE_SIZE)... "

    # Decompress and pull one DID per JSONL record. Verbose mode surfaces
    # zstd/jq errors directly; quiet mode suppresses them, logs a warning,
    # and moves on to the next bundle.
    if [[ $VERBOSE -eq 1 ]]; then
        zstd -dc "$BUNDLE" | jq -r '.did' >> "$DID_LIST"
    else
        zstd -dc "$BUNDLE" 2>/dev/null | jq -r '.did' 2>/dev/null >> "$DID_LIST" || {
            echo -e "\n${YELLOW} Warning: Failed to process $BUNDLE_NUM${NC}"
            continue
        }
    fi
done

echo ""
134
echo ""

# Collapse duplicate DIDs in place, then count what remains.
sort -u "$DID_LIST" -o "$DID_LIST"

TOTAL_DIDS=$(wc -l < "$DID_LIST")
TOTAL_DIDS=${TOTAL_DIDS// /}    # strip wc's whitespace padding (BSD wc indents)

if (( TOTAL_DIDS == 0 )); then
    echo -e "${RED}Error: No DIDs extracted. Check bundle files.${NC}"
    if (( VERBOSE == 0 )); then
        echo "Run with -v to see errors"
    fi
    exit 1
fi

echo -e "${GREEN} ✓ Extracted $TOTAL_DIDS unique DIDs${NC}"
echo ""
152
# Adjust request count if needed
# If the corpus is smaller than warmup+benchmark, shrink the plan: drop the
# warmup entirely when it alone exceeds the corpus, otherwise cap the
# benchmark at whatever is left after the warmup.
TOTAL_NEEDED=$((NUM_REQUESTS + WARMUP))
if [[ $TOTAL_DIDS -lt $TOTAL_NEEDED ]]; then
    echo -e "${YELLOW} Warning: Only $TOTAL_DIDS DIDs available${NC}"
    if [[ $TOTAL_DIDS -le $WARMUP ]]; then
        WARMUP=0
        NUM_REQUESTS=$TOTAL_DIDS
    else
        NUM_REQUESTS=$((TOTAL_DIDS - WARMUP))
    fi
    TOTAL_NEEDED=$((NUM_REQUESTS + WARMUP))
    echo -e "${YELLOW} Adjusted: $WARMUP warmup + $NUM_REQUESTS benchmark${NC}"
    echo ""
fi

# Select random DIDs. NOTE(review): `shuf` is GNU coreutils — not present on
# stock macOS; confirm the target environment provides it. The count is
# quoted to avoid word-splitting (SC2086).
RANDOM_DIDS="$TEMP_DIR/random_dids.txt"
shuf -n "$TOTAL_NEEDED" "$DID_LIST" > "$RANDOM_DIDS"
171
# URL builder
# Map a DID to the benchmark URL for the globally selected $ROUTE:
# data -> /<did>/data, audit -> /<did>/log/audit, anything else -> /<did>.
get_url() {
    local target=$1
    if [[ "$ROUTE" == "data" ]]; then
        printf '%s\n' "$ENDPOINT/$target/data"
    elif [[ "$ROUTE" == "audit" ]]; then
        printf '%s\n' "$ENDPOINT/$target/log/audit"
    else
        printf '%s\n' "$ENDPOINT/$target"
    fi
}
181
# Warmup
# Optional cache-priming pass: fire the first $WARMUP DIDs at the server,
# ignore every result, then drop those DIDs from the benchmark set so the
# measured phase only sees fresh ones.
if [[ $WARMUP -gt 0 ]]; then
    echo -e "${CYAN}━━━ Phase 2: Warmup ━━━${NC}"
    echo ""
    echo " Running $WARMUP warmup requests..."

    # IFS= and -r keep each DID intact (no trimming, no backslash mangling).
    head -n "$WARMUP" "$RANDOM_DIDS" | while IFS= read -r did; do
        curl -s -o /dev/null "$(get_url "$did")" 2>/dev/null || true
    done

    echo -e "${GREEN} ✓ Complete${NC}"
    echo ""

    # Continue the benchmark with the DIDs that were not used for warmup.
    tail -n +$((WARMUP + 1)) "$RANDOM_DIDS" > "$TEMP_DIR/bench.txt"
    RANDOM_DIDS="$TEMP_DIR/bench.txt"
fi
198
# Benchmark
# Phase numbering shifts by one when a warmup phase already ran.
if [[ $WARMUP -gt 0 ]]; then
    PHASE_NUM=3
else
    PHASE_NUM=2
fi

printf '%b\n' "${CYAN}━━━ Phase $PHASE_NUM: Benchmark ━━━${NC}"
printf '\n'

# CSV header
printf '%s\n' "did,status,time_ms,size" > "$RESULTS_FILE"
210
# Benchmark function
# Issue one request for DID $1 and emit a CSV row "did,status,time_ms,size"
# on stdout. 500 responses are additionally reported on stderr so they stay
# visible even while stdout is collected into the results file.
benchmark_one() {
    local did=$1
    local url output line status size start end ms

    # Declaration split from assignment so command failures are not masked
    # by `local`'s own exit status.
    url=$(get_url "$did")

    start=$(date +%s%N)   # nanoseconds (GNU date; stock macOS date lacks %N)
    # `|| true` keeps a failed curl (refused, timeout, ...) from killing the
    # script under `set -e`; the write-out then reports status 000.
    output=$(curl -s -w "\n%{http_code}|%{size_download}" -o /dev/null "$url" 2>&1) || true
    end=$(date +%s%N)

    # The write-out is the last line ("code|size"). Parse it with parameter
    # expansion instead of spawning tail/cut pipelines per field.
    line=${output##*$'\n'}
    status=${line%%|*}
    size=${line#*|}
    ms=$(( (end - start) / 1000000 ))

    # Print URL if 500 error
    if [[ "$status" == "500" ]]; then
        echo -e "${RED}✗ 500 Error - URL: $url${NC}" >&2
    fi

    echo "$did,$status,$ms,$size"
}
231
# Export the worker function and its dependencies so the xargs-spawned bash
# subprocesses (concurrent path) can see them.
export -f benchmark_one
export -f get_url
export ENDPOINT ROUTE RED NC

# Execute
echo " Running $NUM_REQUESTS requests (concurrency: $CONCURRENCY)..."

BENCH_START=$(date +%s%N)

if [[ $CONCURRENCY -eq 1 ]]; then
    # Sequential path: runs in this shell so a progress counter is possible.
    count=0
    while IFS= read -r did; do
        benchmark_one "$did" >> "$RESULTS_FILE"
        count=$((count + 1))
        if (( count % 10 == 0 )); then
            printf "\r Progress: %d/%d (%d%%) " "$count" "$NUM_REQUESTS" "$((count * 100 / NUM_REQUESTS))"
        fi
    done < "$RANDOM_DIDS"
    echo ""
else
    # Concurrent path. Only stdout (the CSV rows) goes into the results
    # file; stderr (e.g. the 500-error warnings) stays on the terminal like
    # in the sequential path — the previous `2>&1` corrupted the CSV with
    # error text. Input is redirected directly instead of piping from cat.
    xargs -P "$CONCURRENCY" -I {} bash -c 'benchmark_one "$@"' _ {} \
        < "$RANDOM_DIDS" >> "$RESULTS_FILE"
    echo -e "${GREEN} ✓ Complete${NC}"
fi

BENCH_END=$(date +%s%N)
# Wall-clock duration in seconds, millisecond precision.
BENCH_SEC=$(awk "BEGIN {printf \"%.3f\", ($BENCH_END - $BENCH_START) / 1000000000}")
258
echo ""

# Results
printf '%b\n' "${BLUE}════════════════════════════════════════════════════════════════${NC}"
printf '%b\n' "${BLUE} Results${NC}"
printf '%b\n' "${BLUE}════════════════════════════════════════════════════════════════${NC}"
echo ""

# Status codes
echo "HTTP Status Codes:"
echo "──────────────────"

# Tally status codes (CSV column 2, header skipped), most frequent first.
while read -r hits code; do
    case "$code" in
        200) printf '%b\n' " ${GREEN}✓ 200${NC}: $hits" ;;
        404) printf '%b\n' " ${YELLOW}⚠ 404${NC}: $hits" ;;
        410) printf '%b\n' " ${YELLOW}⚠ 410${NC}: $hits (deactivated)" ;;
        500) printf '%b\n' " ${RED}✗ 500${NC}: $hits" ;;
        *) echo " ? $code: $hits" ;;
    esac
done < <(tail -n +2 "$RESULTS_FILE" | cut -d',' -f2 | sort | uniq -c | sort -rn)

echo ""
282
# Successful requests only
# Everything below analyses HTTP-200 rows exclusively; failures were already
# summarised in the status-code table above. `|| true` keeps an empty match
# (awk exit status / pipefail) from aborting the script.
SUCCESS_FILE="$TEMP_DIR/success.csv"
tail -n +2 "$RESULTS_FILE" | awk -F',' '$2 == "200"' > "$SUCCESS_FILE" || true
SUCCESS_COUNT=$(wc -l < "$SUCCESS_FILE" | tr -d ' ')

if [[ $SUCCESS_COUNT -gt 0 ]]; then
    echo "Response Times:"
    echo "───────────────"

    # Extract and sort times (CSV column 3, ascending) for percentile lookup.
    TIMES="$TEMP_DIR/times.txt"
    cut -d',' -f3 "$SUCCESS_FILE" | sort -n > "$TIMES"

    # Stats: min/max come straight off the sorted file's ends.
    MIN=$(head -1 "$TIMES")
    MAX=$(tail -1 "$TIMES")
    MEAN=$(awk '{sum+=$1} END {printf "%.1f", sum/NR}' "$TIMES")

    # Percentiles (safe calculation)
    # Maps a percentile fraction to a 1-based line number in $TIMES, clamped
    # to at least 1 so tiny sample sizes never produce "sed -n 0p".
    # NOTE(review): int() truncates rather than taking the nearest-rank
    # ceiling, so small samples read one rank low — confirm this is intended.
    calc_line() {
        local pct=$1
        local line=$(awk "BEGIN {print int($SUCCESS_COUNT * $pct)}")
        [[ $line -lt 1 ]] && line=1
        echo $line
    }

    P50=$(sed -n "$(calc_line 0.50)p" "$TIMES")
    P90=$(sed -n "$(calc_line 0.90)p" "$TIMES")
    P95=$(sed -n "$(calc_line 0.95)p" "$TIMES")
    P99=$(sed -n "$(calc_line 0.99)p" "$TIMES")

    echo " Count: $SUCCESS_COUNT"
    echo " Min: ${MIN} ms"
    echo " Mean: ${MEAN} ms"
    echo " p50: ${P50} ms"
    echo " p90: ${P90} ms"
    echo " p95: ${P95} ms"
    echo " p99: ${P99} ms"
    echo " Max: ${MAX} ms"
    echo ""

    echo "Throughput:"
    echo "───────────"
    # Successful requests divided by wall-clock benchmark duration.
    RPS=$(awk "BEGIN {printf \"%.2f\", $SUCCESS_COUNT / $BENCH_SEC}")
    echo " Duration: ${BENCH_SEC}s"
    echo " Requests/sec: $RPS"
    echo ""

    # Distribution
    echo "Distribution:"
    echo "─────────────"

    # Each entry is "lower:upper:label"; a time t falls in the bucket when
    # lower <= t < upper (upper 999999 acts as +infinity).
    ranges=(
        "0:5:<5ms"
        "5:10:5-10ms"
        "10:20:10-20ms"
        "20:50:20-50ms"
        "50:100:50-100ms"
        "100:500:100-500ms"
        "500:999999:>500ms"
    )

    for range in "${ranges[@]}"; do
        IFS=':' read -r min max label <<< "$range"
        count=$(awk -F',' -v min=$min -v max=$max '$3 >= min && $3 < max {c++} END {print c+0}' "$SUCCESS_FILE")

        # Only non-empty buckets are rendered; the bar is scaled to 30 cells.
        if [[ $count -gt 0 ]]; then
            pct=$(awk "BEGIN {printf \"%.1f\", $count/$SUCCESS_COUNT*100}")
            bar_len=$(awk "BEGIN {printf \"%.0f\", $count/$SUCCESS_COUNT*30}")
            bar=$(printf '█%.0s' $(seq 1 $bar_len))
            printf " %-10s [%-30s] %4d (%5.1f%%)\n" "$label" "$bar" $count $pct
        fi
    done

else
    # No 200s at all: print hints for manual debugging instead of stats.
    echo -e "${RED}No successful requests!${NC}"
    echo ""
    echo "Debugging info:"
    echo " Test connection: curl -v $ENDPOINT/status"
    echo " Check results: cat $RESULTS_FILE"
fi
364
echo ""
printf '%b\n' "${BLUE}════════════════════════════════════════════════════════════════${NC}"

# Persist the raw per-request CSV when the user asked for it.
if [[ -n "$OUTPUT_CSV" ]]; then
    cp "$RESULTS_FILE" "$OUTPUT_CSV"
    printf '%b\n' "${GREEN}✓ Saved to: $OUTPUT_CSV${NC}"
    echo ""
fi

# Verbose extras: the five slowest and five fastest successful requests.
if [[ $VERBOSE -eq 1 && $SUCCESS_COUNT -gt 0 ]]; then
    echo ""
    echo "Top 5 slowest:"
    tail -n +2 "$RESULTS_FILE" \
        | awk -F',' '$2=="200"' \
        | sort -t',' -k3 -rn \
        | head -5 \
        | awk -F',' '{printf " %5d ms - %s\n", $3, $1}'
    echo ""
    echo "Top 5 fastest:"
    tail -n +2 "$RESULTS_FILE" \
        | awk -F',' '$2=="200"' \
        | sort -t',' -k3 -n \
        | head -5 \
        | awk -F',' '{printf " %5d ms - %s\n", $3, $1}'
fi

printf '%b\n' "${BLUE}════════════════════════════════════════════════════════════════${NC}"
388
389# Exit code
390if [[ ${SUCCESS_COUNT:-0} -eq 0 ]]; then
391 exit 1
392fi