tangled
alpha
login
or
join now
atscan.net
/
plcbundle-go
1
fork
atom
[DEPRECATED] Go implementation of plcbundle
1
fork
atom
overview
issues
pulls
pipelines
add benchs
tree.fail
4 months ago
0eb46667
645a6779
+462
2 changed files
expand all
collapse all
unified
split
scripts
did-resolve-benchmark-http-advanced.sh
resolve-benchmark-simple.sh
+387
scripts/did-resolve-benchmark-http-advanced.sh
···
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
···
1
+
#!/usr/bin/env bash
#
# Advanced HTTP benchmark for a DID resolver endpoint: extracts DIDs from
# local plcbundle files (*.jsonl.zst), then measures request latency and
# throughput against $ENDPOINT.
set -euo pipefail

# ANSI colors for terminal output
RED='\033[0;31m'
GREEN='\033[0;32m'
YELLOW='\033[1;33m'
BLUE='\033[0;34m'
CYAN='\033[0;36m'
NC='\033[0m'

# Default configuration (overridable via environment or command-line flags)
ENDPOINT="${ENDPOINT:-http://localhost:8080}"
NUM_REQUESTS="${NUM_REQUESTS:-100}"
CONCURRENCY="${CONCURRENCY:-4}"
BUNDLE_DIR="${BUNDLE_DIR:-.}"
ROUTE="${ROUTE:-document}"
WARMUP="${WARMUP:-0}"
OUTPUT_CSV="${OUTPUT_CSV:-}"
VERBOSE="${VERBOSE:-0}"

# Parse arguments
while [[ $# -gt 0 ]]; do
  case $1 in
    -e|--endpoint) ENDPOINT="$2"; shift 2 ;;
    -n|--requests) NUM_REQUESTS="$2"; shift 2 ;;
    -c|--concurrency) CONCURRENCY="$2"; shift 2 ;;
    -d|--dir) BUNDLE_DIR="$2"; shift 2 ;;
    -r|--route) ROUTE="$2"; shift 2 ;;
    -w|--warmup) WARMUP="$2"; shift 2 ;;
    -o|--output) OUTPUT_CSV="$2"; shift 2 ;;
    -v|--verbose) VERBOSE=1; shift ;;
    -h|--help)
      cat <<EOF
Usage: $0 [options]

Options:
  -e, --endpoint URL    HTTP endpoint (default: http://localhost:8080)
  -n, --requests N      Number of requests (default: 100)
  -c, --concurrency N   Concurrent requests (default: 4)
  -d, --dir PATH        Bundle directory (default: .)
  -r, --route TYPE      Route: document|data|audit (default: document)
  -w, --warmup N        Warmup requests (default: 0)
  -o, --output FILE     Save CSV results
  -v, --verbose         Verbose output

Examples:
  $0 -n 100 -c 4
  $0 -n 1000 -c 10 -w 50 -v -o results.csv
EOF
      exit 0
      ;;
    *) echo -e "${RED}Unknown option: $1${NC}"; exit 1 ;;
  esac
done

# Validate dependencies; quote "$cmd" so an empty/odd value cannot word-split
for cmd in curl jq zstd; do
  if ! command -v "$cmd" &> /dev/null; then
    echo -e "${RED}Error: '$cmd' not found. Install with: brew install $cmd${NC}"
    exit 1
  fi
done
64
+
65
+
echo -e "${BLUE}════════════════════════════════════════════════════════════════${NC}"
echo -e "${BLUE} DID Resolver HTTP Endpoint Benchmark${NC}"
echo -e "${BLUE}════════════════════════════════════════════════════════════════${NC}"
echo ""
echo "Configuration:"
echo " Endpoint: $ENDPOINT"
echo " Route: /$ROUTE"
echo " Requests: $NUM_REQUESTS"
echo " Concurrency: $CONCURRENCY"
if [[ $WARMUP -gt 0 ]]; then
  echo " Warmup: $WARMUP"
fi
echo " Bundle Dir: $BUNDLE_DIR"
echo ""

# Setup temp workspace, removed on any exit path.
TEMP_DIR=$(mktemp -d)
# Single quotes: expand $TEMP_DIR when the trap fires, not when it is set
# (the original double-quoted form broke if the path contained a quote);
# `--` guards against paths that begin with '-'.
trap 'rm -rf -- "$TEMP_DIR"' EXIT

DID_LIST="$TEMP_DIR/dids.txt"
RESULTS_FILE="$TEMP_DIR/results.csv"
86
+
87
+
# Phase 1: Extract DIDs
echo -e "${CYAN}━━━ Phase 1: Extracting DIDs ━━━${NC}"
echo ""

# Find bundles; NUL-delimited so arbitrary file names are safe
BUNDLES=()
while IFS= read -r -d '' line; do
  BUNDLES+=("$line")
done < <(find "$BUNDLE_DIR" -name "*.jsonl.zst" -type f -print0 | sort -z)

BUNDLE_COUNT=${#BUNDLES[@]}

if [[ $BUNDLE_COUNT -eq 0 ]]; then
  echo -e "${RED}Error: No bundles found in $BUNDLE_DIR${NC}"
  exit 1
fi

echo " Found $BUNDLE_COUNT bundles"

# Sample at most 10 bundles
SAMPLE_SIZE=10
if [[ $BUNDLE_COUNT -lt $SAMPLE_SIZE ]]; then
  SAMPLE_SIZE=$BUNDLE_COUNT
fi

echo " Extracting DIDs from $SAMPLE_SIZE bundles..."
echo ""

# Extract DIDs properly (not in subshell)
> "$DID_LIST" # Create empty file

for i in $(seq 0 $((SAMPLE_SIZE - 1))); do
  BUNDLE="${BUNDLES[$i]}"
  BUNDLE_NUM=$(basename "$BUNDLE" .jsonl.zst)

  echo -ne "\r Bundle $BUNDLE_NUM ($((i + 1))/$SAMPLE_SIZE)... "

  # '.did // empty' skips records without a .did field; plain '.did' would
  # emit a literal "null" line and pollute the DID list.
  if [[ $VERBOSE -eq 1 ]]; then
    zstd -dc "$BUNDLE" | jq -r '.did // empty' >> "$DID_LIST"
  else
    zstd -dc "$BUNDLE" 2>/dev/null | jq -r '.did // empty' 2>/dev/null >> "$DID_LIST" || {
      echo -e "\n${YELLOW} Warning: Failed to process $BUNDLE_NUM${NC}"
      continue
    }
  fi
done

echo ""

# Deduplicate in place
sort -u "$DID_LIST" -o "$DID_LIST"

TOTAL_DIDS=$(wc -l < "$DID_LIST" | tr -d ' ')

if [[ $TOTAL_DIDS -eq 0 ]]; then
  echo -e "${RED}Error: No DIDs extracted. Check bundle files.${NC}"
  if [[ $VERBOSE -eq 0 ]]; then
    echo "Run with -v to see errors"
  fi
  exit 1
fi

echo -e "${GREEN} ✓ Extracted $TOTAL_DIDS unique DIDs${NC}"
echo ""
152
+
153
+
# Adjust request count if we have fewer unique DIDs than requested
TOTAL_NEEDED=$((NUM_REQUESTS + WARMUP))
if [[ $TOTAL_DIDS -lt $TOTAL_NEEDED ]]; then
  echo -e "${YELLOW} Warning: Only $TOTAL_DIDS DIDs available${NC}"
  if [[ $TOTAL_DIDS -le $WARMUP ]]; then
    # Not enough DIDs even for the warmup set: skip warmup entirely
    WARMUP=0
    NUM_REQUESTS=$TOTAL_DIDS
  else
    NUM_REQUESTS=$((TOTAL_DIDS - WARMUP))
  fi
  TOTAL_NEEDED=$((NUM_REQUESTS + WARMUP))
  echo -e "${YELLOW} Adjusted: $WARMUP warmup + $NUM_REQUESTS benchmark${NC}"
  echo ""
fi

# Select random DIDs (warmup prefix + benchmark set)
RANDOM_DIDS="$TEMP_DIR/random_dids.txt"
shuf -n "$TOTAL_NEEDED" "$DID_LIST" > "$RANDOM_DIDS"
171
+
172
+
# URL builder: maps a DID to the endpoint URL for the selected $ROUTE.
# Unknown routes fall back to the plain document route.
get_url() {
  local did=$1
  local suffix=""
  case $ROUTE in
    data)  suffix="/data" ;;
    audit) suffix="/log/audit" ;;
  esac
  echo "$ENDPOINT/$did$suffix"
}
181
+
182
+
# Warmup: prime server caches before the measured run
if [[ $WARMUP -gt 0 ]]; then
  echo -e "${CYAN}━━━ Phase 2: Warmup ━━━${NC}"
  echo ""
  echo " Running $WARMUP warmup requests..."

  # Errors are ignored on purpose: warmup results are not measured.
  head -n "$WARMUP" "$RANDOM_DIDS" | while IFS= read -r did; do
    curl -s -o /dev/null "$(get_url "$did")" 2>/dev/null || true
  done

  echo -e "${GREEN} ✓ Complete${NC}"
  echo ""

  # Benchmark only the DIDs after the warmup prefix
  tail -n +"$((WARMUP + 1))" "$RANDOM_DIDS" > "$TEMP_DIR/bench.txt"
  RANDOM_DIDS="$TEMP_DIR/bench.txt"
fi

# Benchmark phase number depends on whether a warmup phase ran
PHASE_NUM=2
if [[ $WARMUP -gt 0 ]]; then
  PHASE_NUM=3
fi

echo -e "${CYAN}━━━ Phase $PHASE_NUM: Benchmark ━━━${NC}"
echo ""

# CSV header
echo "did,status,time_ms,size" > "$RESULTS_FILE"
210
+
211
+
# Benchmark function: one timed request; prints "did,status,time_ms,size".
# Uses curl's %{time_total} for timing instead of `date +%s%N`: BSD/macOS
# date has no %N (it prints a literal "N"), which broke the arithmetic on
# the platform this script targets (brew install hints above).
benchmark_one() {
  local did=$1
  local url
  url=$(get_url "$did")

  # On connection failure curl still prints the -w line with status "000";
  # `|| true` keeps a single bad request from aborting the run under set -e.
  local metrics
  metrics=$(curl -s -o /dev/null -w '%{http_code}|%{size_download}|%{time_total}' "$url" 2>/dev/null) || true

  local status size total_s ms
  status=${metrics%%|*}
  size=$(echo "$metrics" | cut -d'|' -f2)
  total_s=${metrics##*|}
  # Convert fractional seconds to integer milliseconds
  ms=$(awk -v t="$total_s" 'BEGIN {printf "%d", t * 1000}')

  echo "$did,$status,$ms,$size"
}

# Export for the xargs-spawned bash workers in the concurrent path
export -f benchmark_one
export -f get_url
export ENDPOINT ROUTE
230
+
231
+
# Execute
echo " Running $NUM_REQUESTS requests (concurrency: $CONCURRENCY)..."

# Sub-second wall clock: $EPOCHREALTIME needs bash 5+; fall back to whole
# seconds because BSD/macOS `date` does not support %N.
now_epoch() {
  if [[ -n "${EPOCHREALTIME:-}" ]]; then
    echo "$EPOCHREALTIME"
  else
    date +%s
  fi
}

BENCH_START=$(now_epoch)

if [[ $CONCURRENCY -eq 1 ]]; then
  # Sequential path with a progress indicator every 10 requests
  count=0
  while IFS= read -r did; do
    benchmark_one "$did" >> "$RESULTS_FILE"
    count=$((count + 1))
    if (( count % 10 == 0 )); then
      printf "\r Progress: %d/%d (%d%%) " $count $NUM_REQUESTS $((count * 100 / NUM_REQUESTS))
    fi
  done < "$RANDOM_DIDS"
  echo ""
else
  # Concurrent path: one bash worker per DID, bounded by -P
  xargs -P "$CONCURRENCY" -I {} bash -c 'benchmark_one "$@"' _ {} \
    < "$RANDOM_DIDS" >> "$RESULTS_FILE" 2>&1
  echo -e "${GREEN} ✓ Complete${NC}"
fi

BENCH_END=$(now_epoch)
# Duration in seconds (fractional when EPOCHREALTIME is available)
BENCH_SEC=$(awk -v s="$BENCH_START" -v e="$BENCH_END" 'BEGIN {printf "%.3f", e - s}')
253
+
254
+
echo ""

# Results
echo -e "${BLUE}════════════════════════════════════════════════════════════════${NC}"
echo -e "${BLUE} Results${NC}"
echo -e "${BLUE}════════════════════════════════════════════════════════════════${NC}"
echo ""

# Status code breakdown; `uniq -c` emits "count code" pairs
echo "HTTP Status Codes:"
echo "──────────────────"

tail -n +2 "$RESULTS_FILE" | cut -d',' -f2 | sort | uniq -c | sort -rn | while read -r count code; do
  case $code in
    200) echo -e " ${GREEN}✓ 200${NC}: $count" ;;
    404) echo -e " ${YELLOW}⚠ 404${NC}: $count" ;;
    410) echo -e " ${YELLOW}⚠ 410${NC}: $count (deactivated)" ;;
    500) echo -e " ${RED}✗ 500${NC}: $count" ;;
    *) echo " ? $code: $count" ;;
  esac
done

echo ""

# Successful requests only; `|| true` keeps set -e happy when nothing matches
SUCCESS_FILE="$TEMP_DIR/success.csv"
tail -n +2 "$RESULTS_FILE" | awk -F',' '$2 == "200"' > "$SUCCESS_FILE" || true
SUCCESS_COUNT=$(wc -l < "$SUCCESS_FILE" | tr -d ' ')
282
+
283
+
if [[ $SUCCESS_COUNT -gt 0 ]]; then
  echo "Response Times:"
  echo "───────────────"

  # Extract and sort times (CSV column 3 = time_ms)
  TIMES="$TEMP_DIR/times.txt"
  cut -d',' -f3 "$SUCCESS_FILE" | sort -n > "$TIMES"

  # Stats
  MIN=$(head -1 "$TIMES")
  MAX=$(tail -1 "$TIMES")
  MEAN=$(awk '{sum+=$1} END {printf "%.1f", sum/NR}' "$TIMES")

  # Percentiles: 1-based line index into the sorted file, clamped to >= 1.
  # Values passed via `awk -v`, not shell interpolation.
  calc_line() {
    local pct=$1
    local line
    line=$(awk -v c="$SUCCESS_COUNT" -v p="$pct" 'BEGIN {print int(c * p)}')
    [[ $line -lt 1 ]] && line=1
    echo "$line"
  }

  P50=$(sed -n "$(calc_line 0.50)p" "$TIMES")
  P90=$(sed -n "$(calc_line 0.90)p" "$TIMES")
  P95=$(sed -n "$(calc_line 0.95)p" "$TIMES")
  P99=$(sed -n "$(calc_line 0.99)p" "$TIMES")

  echo " Count: $SUCCESS_COUNT"
  echo " Min: ${MIN} ms"
  echo " Mean: ${MEAN} ms"
  echo " p50: ${P50} ms"
  echo " p90: ${P90} ms"
  echo " p95: ${P95} ms"
  echo " p99: ${P99} ms"
  echo " Max: ${MAX} ms"
  echo ""

  echo "Throughput:"
  echo "───────────"
  RPS=$(awk -v c="$SUCCESS_COUNT" -v s="$BENCH_SEC" 'BEGIN {printf "%.2f", c / s}')
  echo " Duration: ${BENCH_SEC}s"
  echo " Requests/sec: $RPS"
  echo ""

  # Distribution buckets: "min:max:label", interval [min, max) in ms
  echo "Distribution:"
  echo "─────────────"

  ranges=(
    "0:5:<5ms"
    "5:10:5-10ms"
    "10:20:10-20ms"
    "20:50:20-50ms"
    "50:100:50-100ms"
    "100:500:100-500ms"
    "500:999999:>500ms"
  )

  for range in "${ranges[@]}"; do
    IFS=':' read -r min max label <<< "$range"
    count=$(awk -F',' -v min="$min" -v max="$max" '$3 >= min && $3 < max {c++} END {print c+0}' "$SUCCESS_FILE")

    if [[ $count -gt 0 ]]; then
      pct=$(awk -v c="$count" -v t="$SUCCESS_COUNT" 'BEGIN {printf "%.1f", c/t*100}')
      bar_len=$(awk -v c="$count" -v t="$SUCCESS_COUNT" 'BEGIN {printf "%.0f", c/t*30}')
      # Guard bar_len == 0: `printf '█%.0s' $(seq 1 0)` would still print
      # one block, because printf reuses the format once with no arguments.
      bar=""
      if [[ $bar_len -gt 0 ]]; then
        bar=$(printf '█%.0s' $(seq 1 "$bar_len"))
      fi
      printf " %-10s [%-30s] %4d (%5.1f%%)\n" "$label" "$bar" "$count" "$pct"
    fi
  done

else
  echo -e "${RED}No successful requests!${NC}"
  echo ""
  echo "Debugging info:"
  echo " Test connection: curl -v $ENDPOINT/status"
  echo " Check results: cat $RESULTS_FILE"
fi
359
+
360
+
echo ""
echo -e "${BLUE}════════════════════════════════════════════════════════════════${NC}"

# Optionally persist the raw CSV results
if [[ -n "$OUTPUT_CSV" ]]; then
  cp "$RESULTS_FILE" "$OUTPUT_CSV"
  echo -e "${GREEN}✓ Saved to: $OUTPUT_CSV${NC}"
  echo ""
fi

# Verbose extras: the five slowest / fastest successful requests
if [[ $VERBOSE -eq 1 && $SUCCESS_COUNT -gt 0 ]]; then
  # $1 = heading, $2 = sort flags for the time column (-rn or -n)
  show_extremes() {
    echo ""
    echo "$1"
    tail -n +2 "$RESULTS_FILE" | awk -F',' '$2=="200"' | sort -t',' -k3 "$2" | head -5 | \
      awk -F',' '{printf " %5d ms - %s\n", $3, $1}'
  }
  show_extremes "Top 5 slowest:" -rn
  show_extremes "Top 5 fastest:" -n
fi

echo -e "${BLUE}════════════════════════════════════════════════════════════════${NC}"

# Non-zero exit when nothing succeeded, so callers/CI can detect failure
if [[ ${SUCCESS_COUNT:-0} -eq 0 ]]; then
  exit 1
fi
+75
scripts/resolve-benchmark-simple.sh
···
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
···
1
+
#!/usr/bin/env bash
#
# Minimal DID resolver benchmark: sequential requests, basic latency stats.
# Usage: resolve-benchmark-simple.sh [endpoint] [num_requests] [bundle_dir]

ENDPOINT="${1:-http://localhost:8080}"
NUM="${2:-100}"
DIR="${3:-.}"

echo "=== DID Resolver Benchmark ==="
echo "Endpoint: $ENDPOINT"
echo "Requests: $NUM"
echo ""

# Extract DIDs from up to 5 bundles
echo "Extracting DIDs..."
TEMP=$(mktemp)
# Single quotes: expand $TEMP when the trap fires, not when it is set
trap 'rm -f -- "$TEMP" "${TEMP}".*' EXIT

# '.did // empty' skips records without a .did field instead of emitting
# a literal "null" line.
find "$DIR" -name "*.jsonl.zst" | head -5 | while IFS= read -r f; do
  zstd -dc "$f" | jq -r '.did // empty'
done | sort -u | shuf -n "$NUM" > "$TEMP"

TOTAL=$(wc -l < "$TEMP" | tr -d ' ')
echo "Testing $TOTAL DIDs"
echo ""

# Run benchmark
echo "Benchmarking..."
SUCCESS=0
FAILED=0

while IFS= read -r did; do
  # One curl per DID; %{time_total} replaces `date +%s%N`, which is not
  # supported by BSD/macOS date (it prints a literal "N").
  OUT=$(curl -s -o /dev/null -w "%{http_code} %{time_total}" "$ENDPOINT/$did" 2>/dev/null)
  CODE=${OUT%% *}
  MS=$(awk -v t="${OUT##* }" 'BEGIN {printf "%d", t * 1000}')

  if [[ $CODE == "200" ]]; then
    SUCCESS=$((SUCCESS + 1))
    echo "$MS" >> "${TEMP}.times"
  else
    FAILED=$((FAILED + 1))
  fi

  DONE=$((SUCCESS + FAILED))
  if (( DONE % 10 == 0 )); then
    printf "\r %d/%d " $DONE $TOTAL
  fi
done < "$TEMP"

echo ""
echo ""
echo "Results:"
echo " Success: $SUCCESS"
echo " Failed: $FAILED"

if [[ $SUCCESS -gt 0 ]]; then
  sort -n "${TEMP}.times" -o "${TEMP}.times"

  MIN=$(head -1 "${TEMP}.times")
  MAX=$(tail -1 "${TEMP}.times")
  AVG=$(awk '{s+=$1} END {printf "%.0f", s/NR}' "${TEMP}.times")

  # Percentiles: clamp the 1-based line index to at least 1; without the
  # clamp, SUCCESS=1 produced `sed -n "0p"`, which is a sed error.
  pct_line() {
    local n
    n=$(awk -v c="$SUCCESS" -v p="$1" 'BEGIN {print int(c * p)}')
    [[ $n -lt 1 ]] && n=1
    echo "$n"
  }
  P50=$(sed -n "$(pct_line 0.50)p" "${TEMP}.times")
  P90=$(sed -n "$(pct_line 0.90)p" "${TEMP}.times")
  P95=$(sed -n "$(pct_line 0.95)p" "${TEMP}.times")

  echo ""
  echo " Min: ${MIN}ms"
  echo " Avg: ${AVG}ms"
  echo " p50: ${P50}ms"
  echo " p90: ${P90}ms"
  echo " p95: ${P95}ms"
  echo " Max: ${MAX}ms"
fi