bench: Cloud benchmark

This commit is contained in:
2026-03-19 12:50:13 +01:00
parent f0ef42fe3f
commit 833c85f4ac
9 changed files with 2036 additions and 929 deletions

View File

@@ -548,6 +548,51 @@ Run it with:
mix bench
```
### Cloud benchmark (Hetzner Cloud)
For distributed runs (one server node + multiple client nodes), use:
```bash
./scripts/run_bench_cloud.sh
```
or invoke the orchestrator directly:
```bash
node scripts/cloud_bench_orchestrate.mjs
```
Prerequisites:
- [`hcloud`](https://github.com/hetznercloud/cli) CLI installed
- Hetzner Cloud token exported as `HCLOUD_TOKEN`
- local `docker`, `git`, `ssh`, and `scp` available
Example:
```bash
export HCLOUD_TOKEN=...
./scripts/run_bench_cloud.sh --quick
```
Outputs:
- raw client logs per run: `bench/cloud_artifacts/<run_id>/...`
- JSONL history entries (local + cloud): `bench/history.jsonl`
Useful history/render commands:
```bash
# List available machines and runs in history
./scripts/run_bench_update.sh --list
# Regenerate chart + README table for a machine
./scripts/run_bench_update.sh <machine_id>
# Regenerate from all machines
./scripts/run_bench_update.sh all
```
Current comparison results:
| metric | parrhesia-pg | parrhesia-mem | strfry | nostr-rs-relay | mem/pg | strfry/pg | nostr-rs/pg |

View File

@@ -1,5 +1,5 @@
{"timestamp":"2026-03-18T22:14:37Z","machine_id":"agent","git_tag":"v0.2.0","git_commit":"b20dbf6","runs":3,"versions":{"parrhesia":"0.2.0","strfry":"strfry 1.0.4 (nixpkgs)","nostr-rs-relay":"nostr-rs-relay 0.9.0","nostr-bench":"nostr-bench 0.4.0"},"servers":{"parrhesia-pg":{"connect_avg_ms":14.666666666666666,"connect_max_ms":25.666666666666668,"echo_tps":77133,"echo_mibs":42.233333333333334,"event_tps":1602.6666666666667,"event_mibs":1.0666666666666667,"req_tps":2418,"req_mibs":12.5},"parrhesia-memory":{"connect_avg_ms":9,"connect_max_ms":16,"echo_tps":64218.333333333336,"echo_mibs":35.166666666666664,"event_tps":1578.3333333333333,"event_mibs":1,"req_tps":2431.3333333333335,"req_mibs":12.633333333333333},"strfry":{"connect_avg_ms":3.3333333333333335,"connect_max_ms":6,"echo_tps":63682.666666666664,"echo_mibs":35.6,"event_tps":3477.3333333333335,"event_mibs":2.2333333333333334,"req_tps":1804,"req_mibs":11.733333333333334},"nostr-rs-relay":{"connect_avg_ms":2.6666666666666665,"connect_max_ms":4.333333333333333,"echo_tps":160009,"echo_mibs":87.63333333333333,"event_tps":762,"event_mibs":0.4666666666666666,"req_tps":831,"req_mibs":2.2333333333333334}}} {"schema_version":2,"timestamp":"2026-03-18T21:35:03Z","machine_id":"agent","git_tag":"v0.6.0","git_commit":"7b337d9","runs":3,"versions":{"parrhesia":"0.6.0","strfry":"strfry 1.0.4 (nixpkgs)","nostr-rs-relay":"nostr-rs-relay 0.9.0","nostr-bench":"nostr-bench 
0.4.0"},"servers":{"parrhesia-pg":{"connect_avg_ms":26.666666666666668,"connect_max_ms":45.333333333333336,"echo_tps":68100.33333333333,"echo_mibs":37.233333333333334,"event_tps":1647.3333333333333,"event_mibs":1.0666666666666667,"req_tps":3576.6666666666665,"req_mibs":18.833333333333332},"parrhesia-memory":{"connect_avg_ms":14.666666666666666,"connect_max_ms":24.333333333333332,"echo_tps":55978,"echo_mibs":30.633333333333336,"event_tps":882,"event_mibs":0.5666666666666668,"req_tps":6888,"req_mibs":36.06666666666666},"strfry":{"connect_avg_ms":3,"connect_max_ms":4.666666666666667,"echo_tps":67718.33333333333,"echo_mibs":37.86666666666667,"event_tps":3548.3333333333335,"event_mibs":2.3,"req_tps":1808,"req_mibs":11.699999999999998},"nostr-rs-relay":{"connect_avg_ms":2,"connect_max_ms":3.3333333333333335,"echo_tps":166178,"echo_mibs":91.03333333333335,"event_tps":787,"event_mibs":0.5,"req_tps":860.6666666666666,"req_mibs":2.4}},"run_id":"local-2026-03-18T21:35:03Z-agent-7b337d9","source":{"kind":"local","git_tag":"v0.6.0","git_commit":"7b337d9"},"infra":{"provider":"local"},"bench":{"runs":3,"targets":["parrhesia-pg","parrhesia-memory","strfry","nostr-rs-relay"]}}
{"timestamp":"2026-03-18T22:22:12Z","machine_id":"agent","git_tag":"v0.3.0","git_commit":"8c8d5a8","runs":3,"versions":{"parrhesia":"0.3.0","strfry":"strfry 1.0.4 (nixpkgs)","nostr-rs-relay":"nostr-rs-relay 0.9.0","nostr-bench":"nostr-bench 0.4.0"},"servers":{"parrhesia-pg":{"connect_avg_ms":13,"connect_max_ms":21.666666666666668,"echo_tps":70703.33333333333,"echo_mibs":38.7,"event_tps":1970.6666666666667,"event_mibs":1.3,"req_tps":3614,"req_mibs":20.966666666666665},"parrhesia-memory":{"connect_avg_ms":13,"connect_max_ms":22.333333333333332,"echo_tps":60452.333333333336,"echo_mibs":33.1,"event_tps":1952.6666666666667,"event_mibs":1.3,"req_tps":3616,"req_mibs":20.766666666666666},"strfry":{"connect_avg_ms":3.6666666666666665,"connect_max_ms":6,"echo_tps":63128.666666666664,"echo_mibs":35.300000000000004,"event_tps":3442,"event_mibs":2.2333333333333334,"req_tps":1804,"req_mibs":11.699999999999998},"nostr-rs-relay":{"connect_avg_ms":2,"connect_max_ms":3.3333333333333335,"echo_tps":164995.33333333334,"echo_mibs":90.36666666666667,"event_tps":761.6666666666666,"event_mibs":0.5,"req_tps":846.3333333333334,"req_mibs":2.333333333333333}}} {"schema_version":2,"timestamp":"2026-03-18T22:14:37Z","machine_id":"agent","git_tag":"v0.2.0","git_commit":"b20dbf6","runs":3,"versions":{"parrhesia":"0.2.0","strfry":"strfry 1.0.4 (nixpkgs)","nostr-rs-relay":"nostr-rs-relay 0.9.0","nostr-bench":"nostr-bench 
0.4.0"},"servers":{"parrhesia-pg":{"connect_avg_ms":14.666666666666666,"connect_max_ms":25.666666666666668,"echo_tps":77133,"echo_mibs":42.233333333333334,"event_tps":1602.6666666666667,"event_mibs":1.0666666666666667,"req_tps":2418,"req_mibs":12.5},"parrhesia-memory":{"connect_avg_ms":9,"connect_max_ms":16,"echo_tps":64218.333333333336,"echo_mibs":35.166666666666664,"event_tps":1578.3333333333333,"event_mibs":1,"req_tps":2431.3333333333335,"req_mibs":12.633333333333333},"strfry":{"connect_avg_ms":3.3333333333333335,"connect_max_ms":6,"echo_tps":63682.666666666664,"echo_mibs":35.6,"event_tps":3477.3333333333335,"event_mibs":2.2333333333333334,"req_tps":1804,"req_mibs":11.733333333333334},"nostr-rs-relay":{"connect_avg_ms":2.6666666666666665,"connect_max_ms":4.333333333333333,"echo_tps":160009,"echo_mibs":87.63333333333333,"event_tps":762,"event_mibs":0.4666666666666666,"req_tps":831,"req_mibs":2.2333333333333334}},"run_id":"local-2026-03-18T22:14:37Z-agent-b20dbf6","source":{"kind":"local","git_tag":"v0.2.0","git_commit":"b20dbf6"},"infra":{"provider":"local"},"bench":{"runs":3,"targets":["parrhesia-pg","parrhesia-memory","strfry","nostr-rs-relay"]}}
{"timestamp":"2026-03-18T22:30:08Z","machine_id":"agent","git_tag":"v0.4.0","git_commit":"b86b5db","runs":3,"versions":{"parrhesia":"0.4.0","strfry":"strfry 1.0.4 (nixpkgs)","nostr-rs-relay":"nostr-rs-relay 0.9.0","nostr-bench":"nostr-bench 0.4.0"},"servers":{"parrhesia-pg":{"connect_avg_ms":11.333333333333334,"connect_max_ms":20.666666666666668,"echo_tps":69139.33333333333,"echo_mibs":37.833333333333336,"event_tps":1938.6666666666667,"event_mibs":1.3,"req_tps":4619.666666666667,"req_mibs":26.266666666666666},"parrhesia-memory":{"connect_avg_ms":10,"connect_max_ms":17.333333333333332,"echo_tps":62715.333333333336,"echo_mibs":34.333333333333336,"event_tps":1573,"event_mibs":1.0333333333333334,"req_tps":4768,"req_mibs":23.733333333333334},"strfry":{"connect_avg_ms":3.3333333333333335,"connect_max_ms":6,"echo_tps":60956.666666666664,"echo_mibs":34.06666666666667,"event_tps":3380.6666666666665,"event_mibs":2.2,"req_tps":1820.3333333333333,"req_mibs":11.800000000000002},"nostr-rs-relay":{"connect_avg_ms":2.6666666666666665,"connect_max_ms":4.333333333333333,"echo_tps":161165.33333333334,"echo_mibs":88.26666666666665,"event_tps":768,"event_mibs":0.5,"req_tps":847.3333333333334,"req_mibs":2.3000000000000003}}} {"schema_version":2,"timestamp":"2026-03-18T22:22:12Z","machine_id":"agent","git_tag":"v0.3.0","git_commit":"8c8d5a8","runs":3,"versions":{"parrhesia":"0.3.0","strfry":"strfry 1.0.4 (nixpkgs)","nostr-rs-relay":"nostr-rs-relay 0.9.0","nostr-bench":"nostr-bench 
0.4.0"},"servers":{"parrhesia-pg":{"connect_avg_ms":13,"connect_max_ms":21.666666666666668,"echo_tps":70703.33333333333,"echo_mibs":38.7,"event_tps":1970.6666666666667,"event_mibs":1.3,"req_tps":3614,"req_mibs":20.966666666666665},"parrhesia-memory":{"connect_avg_ms":13,"connect_max_ms":22.333333333333332,"echo_tps":60452.333333333336,"echo_mibs":33.1,"event_tps":1952.6666666666667,"event_mibs":1.3,"req_tps":3616,"req_mibs":20.766666666666666},"strfry":{"connect_avg_ms":3.6666666666666665,"connect_max_ms":6,"echo_tps":63128.666666666664,"echo_mibs":35.300000000000004,"event_tps":3442,"event_mibs":2.2333333333333334,"req_tps":1804,"req_mibs":11.699999999999998},"nostr-rs-relay":{"connect_avg_ms":2,"connect_max_ms":3.3333333333333335,"echo_tps":164995.33333333334,"echo_mibs":90.36666666666667,"event_tps":761.6666666666666,"event_mibs":0.5,"req_tps":846.3333333333334,"req_mibs":2.333333333333333}},"run_id":"local-2026-03-18T22:22:12Z-agent-8c8d5a8","source":{"kind":"local","git_tag":"v0.3.0","git_commit":"8c8d5a8"},"infra":{"provider":"local"},"bench":{"runs":3,"targets":["parrhesia-pg","parrhesia-memory","strfry","nostr-rs-relay"]}}
{"timestamp":"2026-03-18T22:36:37Z","machine_id":"agent","git_tag":"v0.5.0","git_commit":"e557eba","runs":3,"versions":{"parrhesia":"0.5.0","strfry":"strfry 1.0.4 (nixpkgs)","nostr-rs-relay":"nostr-rs-relay 0.9.0","nostr-bench":"nostr-bench 0.4.0"},"servers":{"parrhesia-pg":{"connect_avg_ms":34.666666666666664,"connect_max_ms":61.666666666666664,"echo_tps":72441,"echo_mibs":39.666666666666664,"event_tps":1897.3333333333333,"event_mibs":1.2333333333333334,"req_tps":13.333333333333334,"req_mibs":0.03333333333333333},"parrhesia-memory":{"connect_avg_ms":43.333333333333336,"connect_max_ms":74.66666666666667,"echo_tps":62704.666666666664,"echo_mibs":34.300000000000004,"event_tps":1370,"event_mibs":0.8666666666666667,"req_tps":47,"req_mibs":0.16666666666666666},"strfry":{"connect_avg_ms":2.6666666666666665,"connect_max_ms":4.666666666666667,"echo_tps":61189.333333333336,"echo_mibs":34.2,"event_tps":3426.6666666666665,"event_mibs":2.2,"req_tps":1811.3333333333333,"req_mibs":11.766666666666666},"nostr-rs-relay":{"connect_avg_ms":2.6666666666666665,"connect_max_ms":4,"echo_tps":152654.33333333334,"echo_mibs":83.63333333333333,"event_tps":772.6666666666666,"event_mibs":0.5,"req_tps":878.3333333333334,"req_mibs":2.4}}} {"schema_version":2,"timestamp":"2026-03-18T22:30:08Z","machine_id":"agent","git_tag":"v0.4.0","git_commit":"b86b5db","runs":3,"versions":{"parrhesia":"0.4.0","strfry":"strfry 1.0.4 (nixpkgs)","nostr-rs-relay":"nostr-rs-relay 0.9.0","nostr-bench":"nostr-bench 
0.4.0"},"servers":{"parrhesia-pg":{"connect_avg_ms":11.333333333333334,"connect_max_ms":20.666666666666668,"echo_tps":69139.33333333333,"echo_mibs":37.833333333333336,"event_tps":1938.6666666666667,"event_mibs":1.3,"req_tps":4619.666666666667,"req_mibs":26.266666666666666},"parrhesia-memory":{"connect_avg_ms":10,"connect_max_ms":17.333333333333332,"echo_tps":62715.333333333336,"echo_mibs":34.333333333333336,"event_tps":1573,"event_mibs":1.0333333333333334,"req_tps":4768,"req_mibs":23.733333333333334},"strfry":{"connect_avg_ms":3.3333333333333335,"connect_max_ms":6,"echo_tps":60956.666666666664,"echo_mibs":34.06666666666667,"event_tps":3380.6666666666665,"event_mibs":2.2,"req_tps":1820.3333333333333,"req_mibs":11.800000000000002},"nostr-rs-relay":{"connect_avg_ms":2.6666666666666665,"connect_max_ms":4.333333333333333,"echo_tps":161165.33333333334,"echo_mibs":88.26666666666665,"event_tps":768,"event_mibs":0.5,"req_tps":847.3333333333334,"req_mibs":2.3000000000000003}},"run_id":"local-2026-03-18T22:30:08Z-agent-b86b5db","source":{"kind":"local","git_tag":"v0.4.0","git_commit":"b86b5db"},"infra":{"provider":"local"},"bench":{"runs":3,"targets":["parrhesia-pg","parrhesia-memory","strfry","nostr-rs-relay"]}}
{"timestamp":"2026-03-18T21:35:03Z","machine_id":"agent","git_tag":"v0.6.0","git_commit":"7b337d9","runs":3,"versions":{"parrhesia":"0.6.0","strfry":"strfry 1.0.4 (nixpkgs)","nostr-rs-relay":"nostr-rs-relay 0.9.0","nostr-bench":"nostr-bench 0.4.0"},"servers":{"parrhesia-pg":{"connect_avg_ms":26.666666666666668,"connect_max_ms":45.333333333333336,"echo_tps":68100.33333333333,"echo_mibs":37.233333333333334,"event_tps":1647.3333333333333,"event_mibs":1.0666666666666667,"req_tps":3576.6666666666665,"req_mibs":18.833333333333332},"parrhesia-memory":{"connect_avg_ms":14.666666666666666,"connect_max_ms":24.333333333333332,"echo_tps":55978,"echo_mibs":30.633333333333336,"event_tps":882,"event_mibs":0.5666666666666668,"req_tps":6888,"req_mibs":36.06666666666666},"strfry":{"connect_avg_ms":3,"connect_max_ms":4.666666666666667,"echo_tps":67718.33333333333,"echo_mibs":37.86666666666667,"event_tps":3548.3333333333335,"event_mibs":2.3,"req_tps":1808,"req_mibs":11.699999999999998},"nostr-rs-relay":{"connect_avg_ms":2,"connect_max_ms":3.3333333333333335,"echo_tps":166178,"echo_mibs":91.03333333333335,"event_tps":787,"event_mibs":0.5,"req_tps":860.6666666666666,"req_mibs":2.4}}} {"schema_version":2,"timestamp":"2026-03-18T22:36:37Z","machine_id":"agent","git_tag":"v0.5.0","git_commit":"e557eba","runs":3,"versions":{"parrhesia":"0.5.0","strfry":"strfry 1.0.4 (nixpkgs)","nostr-rs-relay":"nostr-rs-relay 0.9.0","nostr-bench":"nostr-bench 
0.4.0"},"servers":{"parrhesia-pg":{"connect_avg_ms":34.666666666666664,"connect_max_ms":61.666666666666664,"echo_tps":72441,"echo_mibs":39.666666666666664,"event_tps":1897.3333333333333,"event_mibs":1.2333333333333334,"req_tps":13.333333333333334,"req_mibs":0.03333333333333333},"parrhesia-memory":{"connect_avg_ms":43.333333333333336,"connect_max_ms":74.66666666666667,"echo_tps":62704.666666666664,"echo_mibs":34.300000000000004,"event_tps":1370,"event_mibs":0.8666666666666667,"req_tps":47,"req_mibs":0.16666666666666666},"strfry":{"connect_avg_ms":2.6666666666666665,"connect_max_ms":4.666666666666667,"echo_tps":61189.333333333336,"echo_mibs":34.2,"event_tps":3426.6666666666665,"event_mibs":2.2,"req_tps":1811.3333333333333,"req_mibs":11.766666666666666},"nostr-rs-relay":{"connect_avg_ms":2.6666666666666665,"connect_max_ms":4,"echo_tps":152654.33333333334,"echo_mibs":83.63333333333333,"event_tps":772.6666666666666,"event_mibs":0.5,"req_tps":878.3333333333334,"req_mibs":2.4}},"run_id":"local-2026-03-18T22:36:37Z-agent-e557eba","source":{"kind":"local","git_tag":"v0.5.0","git_commit":"e557eba"},"infra":{"provider":"local"},"bench":{"runs":3,"targets":["parrhesia-pg","parrhesia-memory","strfry","nostr-rs-relay"]}}

View File

@@ -78,6 +78,7 @@ in {
with pkgs; with pkgs;
[ [
just just
# Mix NIFs
gcc gcc
git git
gnumake gnumake
@@ -85,6 +86,8 @@ in {
automake automake
libtool libtool
pkg-config pkg-config
# for tests
openssl
# Nix code formatter # Nix code formatter
alejandra alejandra
# i18n # i18n
@@ -103,8 +106,11 @@ in {
nostr-rs-relay nostr-rs-relay
# Benchmark graph # Benchmark graph
gnuplot gnuplot
# Cloud benchmarks
hcloud
] ]
++ lib.optionals pkgs.stdenv.hostPlatform.isx86_64 [ ++ lib.optionals pkgs.stdenv.hostPlatform.isx86_64 [
# Nostr reference servers
strfry strfry
]; ];

1141
package-lock.json generated

File diff suppressed because it is too large Load Diff

View File

@@ -1,5 +1,5 @@
{ {
"dependencies": { "dependencies": {
"@mariozechner/pi-coding-agent": "^0.57.1" "@mariozechner/pi-coding-agent": "^0.60.0"
} }
} }

File diff suppressed because it is too large Load Diff

205
scripts/run_bench_cloud.sh Executable file
View File

@@ -0,0 +1,205 @@
#!/usr/bin/env bash
# run_bench_cloud.sh — friendly wrapper around scripts/cloud_bench_orchestrate.mjs.
#
# Gathers configuration from environment variables and CLI flags, optionally
# applies a "--quick" smoke-test profile, then invokes the Node orchestrator,
# which provisions Hetzner Cloud nodes and runs the distributed benchmark.
set -euo pipefail
# Resolve the repository root from this script's own location so the wrapper
# works no matter where it is invoked from.
ROOT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")/.." && pwd)"
cd "$ROOT_DIR"
usage() {
cat <<'EOF'
usage:
./scripts/run_bench_cloud.sh [options] [-- extra args for cloud_bench_orchestrate.mjs]
Friendly wrapper around scripts/cloud_bench_orchestrate.mjs.
Defaults (override via env or flags):
datacenter: fsn1-dc14
server/client type: cx23
clients: 3
runs: 3
targets: parrhesia-pg,parrhesia-memory,strfry,nostr-rs-relay
Flags:
--quick Quick smoke profile (1 run, 1 client, lower load)
--clients N Override client count
--runs N Override run count
--targets CSV Override targets
--datacenter NAME Override datacenter
--server-type NAME Override server type
--client-type NAME Override client type
--image IMAGE Use remote Parrhesia image (e.g. ghcr.io/...)
--git-ref REF Build Parrhesia image from git ref (default: HEAD)
--keep Keep cloud resources after run
-h, --help
Environment overrides:
PARRHESIA_CLOUD_DATACENTER (default: fsn1-dc14)
PARRHESIA_CLOUD_SERVER_TYPE (default: cx23)
PARRHESIA_CLOUD_CLIENT_TYPE (default: cx23)
PARRHESIA_CLOUD_CLIENTS (default: 3)
PARRHESIA_BENCH_RUNS (default: 3)
PARRHESIA_CLOUD_TARGETS (default: all 4)
PARRHESIA_CLOUD_PARRHESIA_IMAGE (optional)
PARRHESIA_CLOUD_GIT_REF (default: HEAD)
Bench knobs (forwarded):
PARRHESIA_BENCH_CONNECT_COUNT
PARRHESIA_BENCH_CONNECT_RATE
PARRHESIA_BENCH_ECHO_COUNT
PARRHESIA_BENCH_ECHO_RATE
PARRHESIA_BENCH_ECHO_SIZE
PARRHESIA_BENCH_EVENT_COUNT
PARRHESIA_BENCH_EVENT_RATE
PARRHESIA_BENCH_REQ_COUNT
PARRHESIA_BENCH_REQ_RATE
PARRHESIA_BENCH_REQ_LIMIT
PARRHESIA_BENCH_KEEPALIVE_SECONDS
Examples:
# Default full cloud run
./scripts/run_bench_cloud.sh
# Quick smoke
./scripts/run_bench_cloud.sh --quick
# Use a GHCR image
./scripts/run_bench_cloud.sh --image ghcr.io/owner/parrhesia:latest
EOF
}
# Defaults: environment variables first, then CLI flags below may override.
DATACENTER="${PARRHESIA_CLOUD_DATACENTER:-fsn1-dc14}"
SERVER_TYPE="${PARRHESIA_CLOUD_SERVER_TYPE:-cx23}"
CLIENT_TYPE="${PARRHESIA_CLOUD_CLIENT_TYPE:-cx23}"
CLIENTS="${PARRHESIA_CLOUD_CLIENTS:-3}"
RUNS="${PARRHESIA_BENCH_RUNS:-3}"
TARGETS="${PARRHESIA_CLOUD_TARGETS:-parrhesia-pg,parrhesia-memory,strfry,nostr-rs-relay}"
PARRHESIA_IMAGE="${PARRHESIA_CLOUD_PARRHESIA_IMAGE:-}"
GIT_REF="${PARRHESIA_CLOUD_GIT_REF:-HEAD}"
KEEP=0
QUICK=0
EXTRA_ARGS=()
# Parse CLI flags; everything after a literal `--` is passed through verbatim
# to the orchestrator.
while [[ $# -gt 0 ]]; do
case "$1" in
-h|--help)
usage
exit 0
;;
--quick)
QUICK=1
shift
;;
--clients)
CLIENTS="$2"
shift 2
;;
--runs)
RUNS="$2"
shift 2
;;
--targets)
TARGETS="$2"
shift 2
;;
--datacenter)
DATACENTER="$2"
shift 2
;;
--server-type)
SERVER_TYPE="$2"
shift 2
;;
--client-type)
CLIENT_TYPE="$2"
shift 2
;;
--image)
PARRHESIA_IMAGE="$2"
shift 2
;;
--git-ref)
GIT_REF="$2"
shift 2
;;
--keep)
KEEP=1
shift
;;
--)
shift
EXTRA_ARGS+=("$@")
break
;;
*)
echo "Unknown argument: $1" >&2
usage
exit 1
;;
esac
done
# Quick smoke profile: single run, single client, and low default load for
# every bench knob the user has not already set (`:=` only assigns if unset).
if [[ "$QUICK" == "1" ]]; then
RUNS=1
CLIENTS=1
: "${PARRHESIA_BENCH_CONNECT_COUNT:=20}"
: "${PARRHESIA_BENCH_CONNECT_RATE:=20}"
: "${PARRHESIA_BENCH_ECHO_COUNT:=20}"
: "${PARRHESIA_BENCH_ECHO_RATE:=20}"
: "${PARRHESIA_BENCH_ECHO_SIZE:=512}"
: "${PARRHESIA_BENCH_EVENT_COUNT:=20}"
: "${PARRHESIA_BENCH_EVENT_RATE:=20}"
: "${PARRHESIA_BENCH_REQ_COUNT:=20}"
: "${PARRHESIA_BENCH_REQ_RATE:=20}"
: "${PARRHESIA_BENCH_REQ_LIMIT:=10}"
: "${PARRHESIA_BENCH_KEEPALIVE_SECONDS:=2}"
fi
# Assemble the orchestrator command line.
CMD=(
node scripts/cloud_bench_orchestrate.mjs
--datacenter "$DATACENTER"
--server-type "$SERVER_TYPE"
--client-type "$CLIENT_TYPE"
--clients "$CLIENTS"
--runs "$RUNS"
--targets "$TARGETS"
)
# A remote image takes precedence over building from a git ref.
if [[ -n "$PARRHESIA_IMAGE" ]]; then
CMD+=(--parrhesia-image "$PARRHESIA_IMAGE")
else
CMD+=(--git-ref "$GIT_REF")
fi
if [[ "$KEEP" == "1" ]]; then
CMD+=(--keep)
fi
# Forward bench knob envs if set: PARRHESIA_BENCH_FOO=1 becomes `--foo 1`.
for kv in \
PARRHESIA_BENCH_CONNECT_COUNT \
PARRHESIA_BENCH_CONNECT_RATE \
PARRHESIA_BENCH_ECHO_COUNT \
PARRHESIA_BENCH_ECHO_RATE \
PARRHESIA_BENCH_ECHO_SIZE \
PARRHESIA_BENCH_EVENT_COUNT \
PARRHESIA_BENCH_EVENT_RATE \
PARRHESIA_BENCH_REQ_COUNT \
PARRHESIA_BENCH_REQ_RATE \
PARRHESIA_BENCH_REQ_LIMIT \
PARRHESIA_BENCH_KEEPALIVE_SECONDS
do
if [[ -n "${!kv:-}" ]]; then
flag="--$(echo "$kv" | tr '[:upper:]' '[:lower:]' | sed -E 's/^parrhesia_bench_//' | tr '_' '-')"
CMD+=("$flag" "${!kv}")
fi
done
# BUGFIX: on bash < 4.4, expanding an empty array with "${EXTRA_ARGS[@]}"
# under `set -u` aborts with "unbound variable", so this script failed
# whenever no `--` passthrough args were given. Only append when non-empty.
if [[ ${#EXTRA_ARGS[@]} -gt 0 ]]; then
CMD+=("${EXTRA_ARGS[@]}")
fi
# Echo the fully-quoted command before running it, for reproducibility.
printf 'Running cloud bench:\n %q' "${CMD[0]}"
for ((i=1; i<${#CMD[@]}; i++)); do
printf ' %q' "${CMD[$i]}"
done
printf '\n\n'
"${CMD[@]}"

View File

@@ -78,11 +78,23 @@ const [, , jsonOut, timestamp, machineId, gitTag, gitCommit, runsStr, historyFil
const { versions, ...servers } = JSON.parse(fs.readFileSync(jsonOut, "utf8")); const { versions, ...servers } = JSON.parse(fs.readFileSync(jsonOut, "utf8"));
const entry = { const entry = {
schema_version: 2,
timestamp, timestamp,
run_id: `local-${timestamp}-${machineId}-${gitCommit}`,
machine_id: machineId, machine_id: machineId,
git_tag: gitTag, git_tag: gitTag,
git_commit: gitCommit, git_commit: gitCommit,
runs: Number(runsStr), runs: Number(runsStr),
source: {
kind: "local",
mode: "run_bench_collect",
git_ref: gitTag,
git_tag: gitTag,
git_commit: gitCommit,
},
infra: {
provider: "local",
},
versions: versions || {}, versions: versions || {},
servers, servers,
}; };

View File

@@ -7,100 +7,178 @@ cd "$ROOT_DIR"
usage() { usage() {
cat <<'EOF' cat <<'EOF'
usage: usage:
./scripts/run_bench_update.sh [machine_id] ./scripts/run_bench_update.sh [machine_id|all]
./scripts/run_bench_update.sh --machine <machine_id|all> [--run-id <run_id>]
./scripts/run_bench_update.sh --list
Regenerates bench/chart.svg and updates the benchmark table in README.md Regenerates bench/chart.svg and updates the benchmark table in README.md
from collected data in bench/history.jsonl. from collected data in bench/history.jsonl.
Arguments: Options:
machine_id Optional. Filter to a specific machine's data. --machine <id|all> Filter by machine_id (default: hostname -s)
Default: current machine (hostname -s) --run-id <id> Filter to an exact run_id
Use "all" to include all machines (will use latest entry per tag) --history-file <path> History JSONL file (default: bench/history.jsonl)
--list List available machines and runs, then exit
Examples: -h, --help
# Update chart for current machine
./scripts/run_bench_update.sh
# Update chart for specific machine
./scripts/run_bench_update.sh my-server
# Update chart using all machines (latest entry per tag wins)
./scripts/run_bench_update.sh all
EOF EOF
} }
if [[ "${1:-}" == "-h" || "${1:-}" == "--help" ]]; then
usage
exit 0
fi
# --- Configuration -----------------------------------------------------------
BENCH_DIR="$ROOT_DIR/bench" BENCH_DIR="$ROOT_DIR/bench"
HISTORY_FILE="$BENCH_DIR/history.jsonl" HISTORY_FILE="$BENCH_DIR/history.jsonl"
CHART_FILE="$BENCH_DIR/chart.svg" CHART_FILE="$BENCH_DIR/chart.svg"
GNUPLOT_TEMPLATE="$BENCH_DIR/chart.gnuplot" GNUPLOT_TEMPLATE="$BENCH_DIR/chart.gnuplot"
README_FILE="$ROOT_DIR/README.md"
MACHINE_ID="${1:-$(hostname -s)}" MACHINE_ID="$(hostname -s)"
RUN_ID=""
LIST_ONLY=0
POSITIONAL_MACHINE=""
while [[ $# -gt 0 ]]; do
case "$1" in
-h|--help)
usage
exit 0
;;
--machine)
MACHINE_ID="$2"
shift 2
;;
--run-id)
RUN_ID="$2"
shift 2
;;
--history-file)
HISTORY_FILE="$2"
shift 2
;;
--list)
LIST_ONLY=1
shift
;;
*)
if [[ -z "$POSITIONAL_MACHINE" ]]; then
POSITIONAL_MACHINE="$1"
shift
else
echo "Unexpected argument: $1" >&2
usage
exit 1
fi
;;
esac
done
if [[ -n "$POSITIONAL_MACHINE" ]]; then
MACHINE_ID="$POSITIONAL_MACHINE"
fi
if [[ ! -f "$HISTORY_FILE" ]]; then if [[ ! -f "$HISTORY_FILE" ]]; then
echo "Error: No history file found at $HISTORY_FILE" >&2 echo "Error: No history file found at $HISTORY_FILE" >&2
echo "Run ./scripts/run_bench_collect.sh first to collect benchmark data" >&2 echo "Run ./scripts/run_bench_collect.sh or ./scripts/run_bench_cloud.sh first" >&2
exit 1 exit 1
fi fi
# --list mode: print a summary of the machines and runs recorded in the
# history JSONL file, then exit without regenerating chart/README.
if [[ "$LIST_ONLY" == "1" ]]; then
node - "$HISTORY_FILE" <<'NODE'
const fs = require("node:fs");
const [, , historyFile] = process.argv;
// Parse every non-blank line of the JSONL history file into an object.
const entries = fs.readFileSync(historyFile, "utf8")
.split("\n")
.filter((l) => l.trim().length > 0)
.map((l) => JSON.parse(l));
if (entries.length === 0) {
console.log("No entries in history file.");
process.exit(0);
}
// Newest first, for the per-run listing further below.
entries.sort((a, b) => b.timestamp.localeCompare(a.timestamp));
// Aggregate per machine: how many entries, and the most recent one.
const machines = new Map();
for (const e of entries) {
const machineId = e.machine_id || "unknown";
const prev = machines.get(machineId);
if (!prev) {
machines.set(machineId, { count: 1, latest: e });
} else {
prev.count += 1;
// Keep the entry with the lexicographically greatest (i.e. latest
// ISO-8601) timestamp as this machine's representative.
if ((e.timestamp || "") > (prev.latest.timestamp || "")) prev.latest = e;
}
}
console.log("Machines:");
console.log(" machine_id entries latest_timestamp latest_tag");
// Fixed-width columns; machines sorted alphabetically by id.
for (const [machineId, info] of [...machines.entries()].sort((a, b) => a[0].localeCompare(b[0]))) {
const id = machineId.padEnd(34, " ");
const count = String(info.count).padStart(7, " ");
const ts = (info.latest.timestamp || "").padEnd(24, " ");
const tag = info.latest.git_tag || "";
console.log(` ${id} ${count} ${ts} ${tag}`);
}
console.log("\nRuns (newest first):");
console.log(" timestamp run_id machine_id source git_tag targets");
for (const e of entries) {
// Truncate/pad each field so the listing stays columnar; targets come
// from the bench metadata when present, else from the servers keys.
const ts = (e.timestamp || "").slice(0, 19).padEnd(24, " ");
const runId = (e.run_id || "").slice(0, 36).padEnd(36, " ");
const machineId = (e.machine_id || "").slice(0, 24).padEnd(24, " ");
const source = (e.source?.kind || "").padEnd(6, " ");
const tag = (e.git_tag || "").slice(0, 16).padEnd(16, " ");
const targets = (e.bench?.targets || Object.keys(e.servers || {})).join(",");
console.log(` ${ts} ${runId} ${machineId} ${source} ${tag} ${targets}`);
}
NODE
exit 0
fi
WORK_DIR="$(mktemp -d)" WORK_DIR="$(mktemp -d)"
trap 'rm -rf "$WORK_DIR"' EXIT trap 'rm -rf "$WORK_DIR"' EXIT
# --- Generate chart ---------------------------------------------------------- echo "Generating chart (machine=$MACHINE_ID${RUN_ID:+, run_id=$RUN_ID})"
echo "Generating chart for machine: $MACHINE_ID" if ! node - "$HISTORY_FILE" "$MACHINE_ID" "$RUN_ID" "$WORK_DIR" <<'NODE'
node - "$HISTORY_FILE" "$MACHINE_ID" "$WORK_DIR" <<'NODE'
const fs = require("node:fs"); const fs = require("node:fs");
const path = require("node:path"); const path = require("node:path");
const [, , historyFile, machineId, workDir] = process.argv; const [, , historyFile, machineId, runId, workDir] = process.argv;
if (!fs.existsSync(historyFile)) {
console.log(" no history file, skipping chart generation");
process.exit(0);
}
const lines = fs.readFileSync(historyFile, "utf8")
.split("\n")
.filter(l => l.trim().length > 0)
.map(l => JSON.parse(l));
// Filter to selected machine(s)
let entries;
if (machineId === "all") {
entries = lines;
console.log(" using all machines");
} else {
entries = lines.filter(e => e.machine_id === machineId);
console.log(" filtered to machine: " + machineId);
}
if (entries.length === 0) {
console.log(" no history entries for machine '" + machineId + "', skipping chart");
process.exit(0);
}
// Sort chronologically, deduplicate by tag (latest wins),
// then order the resulting series by git tag.
entries.sort((a, b) => a.timestamp.localeCompare(b.timestamp));
const byTag = new Map();
for (const e of entries) {
byTag.set(e.git_tag, e);
}
const deduped = [...byTag.values()];
function parseSemverTag(tag) { function parseSemverTag(tag) {
const match = /^v?(\d+)\.(\d+)\.(\d+)$/.exec(tag); const match = /^v?(\d+)\.(\d+)\.(\d+)$/.exec(tag || "");
return match ? match.slice(1).map(Number) : null; return match ? match.slice(1).map(Number) : null;
} }
const all = fs.readFileSync(historyFile, "utf8")
.split("\n")
.filter((l) => l.trim().length > 0)
.map((l) => JSON.parse(l));
let selected = all;
if (runId && runId.length > 0) {
selected = all.filter((e) => e.run_id === runId);
console.log(` filtered by run_id: ${runId}`);
} else if (machineId !== "all") {
selected = all.filter((e) => e.machine_id === machineId);
console.log(` filtered to machine: ${machineId}`);
} else {
console.log(" using all machines");
}
if (selected.length === 0) {
console.error(" no matching history entries");
process.exit(1);
}
selected.sort((a, b) => (a.timestamp || "").localeCompare(b.timestamp || ""));
const byTag = new Map();
for (const e of selected) {
byTag.set(e.git_tag || "untagged", e);
}
const deduped = [...byTag.values()];
deduped.sort((a, b) => { deduped.sort((a, b) => {
const aTag = parseSemverTag(a.git_tag); const aTag = parseSemverTag(a.git_tag);
const bTag = parseSemverTag(b.git_tag); const bTag = parseSemverTag(b.git_tag);
@@ -109,24 +187,19 @@ deduped.sort((a, b) => {
return aTag[0] - bTag[0] || aTag[1] - bTag[1] || aTag[2] - bTag[2]; return aTag[0] - bTag[0] || aTag[1] - bTag[1] || aTag[2] - bTag[2];
} }
return a.git_tag.localeCompare(b.git_tag, undefined, { numeric: true }); return (a.git_tag || "").localeCompare(b.git_tag || "", undefined, { numeric: true });
}); });
// Determine which non-parrhesia servers are present
const baselineServerNames = ["strfry", "nostr-rs-relay"]; const baselineServerNames = ["strfry", "nostr-rs-relay"];
const presentBaselines = baselineServerNames.filter(srv => const presentBaselines = baselineServerNames.filter((srv) => deduped.some((e) => e.servers?.[srv]));
deduped.some(e => e.servers[srv])
);
// Metrics to chart
const chartMetrics = [ const chartMetrics = [
{ key: "event_tps", label: "Event Throughput (TPS) — higher is better", file: "event_tps.tsv", ylabel: "TPS" }, { key: "event_tps", label: "Event Throughput (TPS) — higher is better", file: "event_tps.tsv", ylabel: "TPS" },
{ key: "req_tps", label: "Req Throughput (TPS) — higher is better", file: "req_tps.tsv", ylabel: "TPS" }, { key: "req_tps", label: "Req Throughput (TPS) — higher is better", file: "req_tps.tsv", ylabel: "TPS" },
{ key: "echo_tps", label: "Echo Throughput (TPS) — higher is better", file: "echo_tps.tsv", ylabel: "TPS" }, { key: "echo_tps", label: "Echo Throughput (TPS) — higher is better", file: "echo_tps.tsv", ylabel: "TPS" },
{ key: "connect_avg_ms", label: "Connect Avg Latency (ms) — lower is better", file: "connect_avg_ms.tsv", ylabel: "ms" }, { key: "connect_avg_ms", label: "Connect Avg Latency (ms) — lower is better", file: "connect_avg_ms.tsv", ylabel: "ms" },
]; ];
// Write per-metric TSV files
for (const cm of chartMetrics) { for (const cm of chartMetrics) {
const header = ["tag", "parrhesia-pg", "parrhesia-memory"]; const header = ["tag", "parrhesia-pg", "parrhesia-memory"];
for (const srv of presentBaselines) header.push(srv); for (const srv of presentBaselines) header.push(srv);
@@ -134,12 +207,12 @@ for (const cm of chartMetrics) {
const rows = [header.join("\t")]; const rows = [header.join("\t")];
for (const e of deduped) { for (const e of deduped) {
const row = [ const row = [
e.git_tag, e.git_tag || "untagged",
e.servers["parrhesia-pg"]?.[cm.key] ?? "NaN", e.servers?.["parrhesia-pg"]?.[cm.key] ?? "NaN",
e.servers["parrhesia-memory"]?.[cm.key] ?? "NaN", e.servers?.["parrhesia-memory"]?.[cm.key] ?? "NaN",
]; ];
for (const srv of presentBaselines) { for (const srv of presentBaselines) {
row.push(e.servers[srv]?.[cm.key] ?? "NaN"); row.push(e.servers?.[srv]?.[cm.key] ?? "NaN");
} }
rows.push(row.join("\t")); rows.push(row.join("\t"));
} }
@@ -147,7 +220,6 @@ for (const cm of chartMetrics) {
fs.writeFileSync(path.join(workDir, cm.file), rows.join("\n") + "\n", "utf8"); fs.writeFileSync(path.join(workDir, cm.file), rows.join("\n") + "\n", "utf8");
} }
// Generate gnuplot plot commands (handles variable column counts)
const serverLabels = ["parrhesia-pg", "parrhesia-memory"]; const serverLabels = ["parrhesia-pg", "parrhesia-memory"];
for (const srv of presentBaselines) serverLabels.push(srv); for (const srv of presentBaselines) serverLabels.push(srv);
@@ -158,10 +230,9 @@ for (const cm of chartMetrics) {
plotLines.push(`set ylabel "${cm.ylabel}"`); plotLines.push(`set ylabel "${cm.ylabel}"`);
const plotParts = []; const plotParts = [];
// Column 2 = parrhesia-pg, 3 = parrhesia-memory, 4+ = baselines
plotParts.push(`${dataFile} using 0:2:xtic(1) lt 1 title "${serverLabels[0]}"`); plotParts.push(`${dataFile} using 0:2:xtic(1) lt 1 title "${serverLabels[0]}"`);
plotParts.push(`'' using 0:3 lt 2 title "${serverLabels[1]}"`); plotParts.push(`'' using 0:3 lt 2 title "${serverLabels[1]}"`);
for (let i = 0; i < presentBaselines.length; i++) { for (let i = 0; i < presentBaselines.length; i += 1) {
plotParts.push(`'' using 0:${4 + i} lt ${3 + i} title "${serverLabels[2 + i]}"`); plotParts.push(`'' using 0:${4 + i} lt ${3 + i} title "${serverLabels[2 + i]}"`);
} }
@@ -169,14 +240,22 @@ for (const cm of chartMetrics) {
plotLines.push(""); plotLines.push("");
} }
fs.writeFileSync( fs.writeFileSync(path.join(workDir, "plot_commands.gnuplot"), plotLines.join("\n") + "\n", "utf8");
path.join(workDir, "plot_commands.gnuplot"),
plotLines.join("\n") + "\n",
"utf8"
);
console.log(" " + deduped.length + " tag(s), " + presentBaselines.length + " baseline server(s)"); const latestForReadme = [...selected]
.sort((a, b) => (b.timestamp || "").localeCompare(a.timestamp || ""))
.find((e) => e.servers?.["parrhesia-pg"] && e.servers?.["parrhesia-memory"]);
if (latestForReadme) {
fs.writeFileSync(path.join(workDir, "latest_entry.json"), JSON.stringify(latestForReadme), "utf8");
}
console.log(` selected=${selected.length}, series_tags=${deduped.length}, baselines=${presentBaselines.length}`);
NODE NODE
then
echo "No matching data for chart/update" >&2
exit 1
fi
if [[ -f "$WORK_DIR/plot_commands.gnuplot" ]]; then if [[ -f "$WORK_DIR/plot_commands.gnuplot" ]]; then
gnuplot \ gnuplot \
@@ -185,52 +264,26 @@ if [[ -f "$WORK_DIR/plot_commands.gnuplot" ]]; then
"$GNUPLOT_TEMPLATE" "$GNUPLOT_TEMPLATE"
echo " chart written to $CHART_FILE" echo " chart written to $CHART_FILE"
else else
echo " chart generation skipped (no data for this machine)" echo " chart generation skipped"
exit 0
fi fi
# --- Update README.md -------------------------------------------------------
echo "Updating README.md with latest benchmark..." echo "Updating README.md with latest benchmark..."
# Find the most recent entry for this machine if [[ ! -f "$WORK_DIR/latest_entry.json" ]]; then
LATEST_ENTRY=$(node - "$HISTORY_FILE" "$MACHINE_ID" <<'NODE' echo "Warning: no selected entry contains both parrhesia-pg and parrhesia-memory; skipping README table update" >&2
const fs = require("node:fs"); echo
const [, , historyFile, machineId] = process.argv; echo "Benchmark rendering complete. Files updated:"
echo " $CHART_FILE"
const lines = fs.readFileSync(historyFile, "utf8") echo
.split("\n")
.filter(l => l.trim().length > 0)
.map(l => JSON.parse(l));
let entries;
if (machineId === "all") {
entries = lines;
} else {
entries = lines.filter(e => e.machine_id === machineId);
}
if (entries.length === 0) {
console.error("No entries found for machine: " + machineId);
process.exit(1);
}
// Get latest entry
entries.sort((a, b) => b.timestamp.localeCompare(a.timestamp));
console.log(JSON.stringify(entries[0]));
NODE
)
if [[ -z "$LATEST_ENTRY" ]]; then
echo "Warning: Could not find latest entry, skipping README update" >&2
exit 0 exit 0
fi fi
node - "$LATEST_ENTRY" "$ROOT_DIR/README.md" <<'NODE' LATEST_ENTRY="$(cat "$WORK_DIR/latest_entry.json")"
node - "$LATEST_ENTRY" "$README_FILE" <<'NODE'
const fs = require("node:fs"); const fs = require("node:fs");
const [, , entryJson, readmePath] = process.argv; const [, , entryJson, readmePath] = process.argv;
const entry = JSON.parse(entryJson); const entry = JSON.parse(entryJson);
const servers = entry.servers || {}; const servers = entry.servers || {};
@@ -240,11 +293,7 @@ const strfry = servers["strfry"];
const nostrRs = servers["nostr-rs-relay"]; const nostrRs = servers["nostr-rs-relay"];
if (!pg || !mem) { if (!pg || !mem) {
const present = Object.keys(servers).sort().join(", ") || "(none)"; console.error("Selected entry is missing parrhesia-pg or parrhesia-memory");
console.error(
"Latest benchmark entry must include parrhesia-pg and parrhesia-memory. Present servers: " +
present
);
process.exit(1); process.exit(1);
} }
@@ -259,26 +308,26 @@ function ratio(base, other) {
function boldIf(ratioStr, lowerIsBetter) { function boldIf(ratioStr, lowerIsBetter) {
if (ratioStr === "n/a") return ratioStr; if (ratioStr === "n/a") return ratioStr;
const num = parseFloat(ratioStr); const num = Number.parseFloat(ratioStr);
if (!Number.isFinite(num)) return ratioStr;
const better = lowerIsBetter ? num < 1 : num > 1; const better = lowerIsBetter ? num < 1 : num > 1;
return better ? "**" + ratioStr + "**" : ratioStr; return better ? `**${ratioStr}**` : ratioStr;
} }
const metricRows = [ const metricRows = [
["connect avg latency (ms) ↓", "connect_avg_ms", true], ["connect avg latency (ms) ↓", "connect_avg_ms", true],
["connect max latency (ms) ↓", "connect_max_ms", true], ["connect max latency (ms) ↓", "connect_max_ms", true],
["echo throughput (TPS) ↑", "echo_tps", false], ["echo throughput (TPS) ↑", "echo_tps", false],
["echo throughput (MiB/s) ↑", "echo_mibs", false], ["echo throughput (MiB/s) ↑", "echo_mibs", false],
["event throughput (TPS) ↑", "event_tps", false], ["event throughput (TPS) ↑", "event_tps", false],
["event throughput (MiB/s) ↑", "event_mibs", false], ["event throughput (MiB/s) ↑", "event_mibs", false],
["req throughput (TPS) ↑", "req_tps", false], ["req throughput (TPS) ↑", "req_tps", false],
["req throughput (MiB/s) ↑", "req_mibs", false], ["req throughput (MiB/s) ↑", "req_mibs", false],
]; ];
const hasStrfry = !!strfry; const hasStrfry = !!strfry;
const hasNostrRs = !!nostrRs; const hasNostrRs = !!nostrRs;
// Build header
const header = ["metric", "parrhesia-pg", "parrhesia-mem"]; const header = ["metric", "parrhesia-pg", "parrhesia-mem"];
if (hasStrfry) header.push("strfry"); if (hasStrfry) header.push("strfry");
if (hasNostrRs) header.push("nostr-rs-relay"); if (hasNostrRs) header.push("nostr-rs-relay");
@@ -287,7 +336,7 @@ if (hasStrfry) header.push("strfry/pg");
if (hasNostrRs) header.push("nostr-rs/pg"); if (hasNostrRs) header.push("nostr-rs/pg");
const alignRow = ["---"]; const alignRow = ["---"];
for (let i = 1; i < header.length; i++) alignRow.push("---:"); for (let i = 1; i < header.length; i += 1) alignRow.push("---:");
const rows = metricRows.map(([label, key, lowerIsBetter]) => { const rows = metricRows.map(([label, key, lowerIsBetter]) => {
const row = [label, toFixed(pg[key]), toFixed(mem[key])]; const row = [label, toFixed(pg[key]), toFixed(mem[key])];
@@ -304,13 +353,12 @@ const rows = metricRows.map(([label, key, lowerIsBetter]) => {
const tableLines = [ const tableLines = [
"| " + header.join(" | ") + " |", "| " + header.join(" | ") + " |",
"| " + alignRow.join(" | ") + " |", "| " + alignRow.join(" | ") + " |",
...rows.map(r => "| " + r.join(" | ") + " |"), ...rows.map((r) => "| " + r.join(" | ") + " |"),
]; ];
// Replace the first markdown table in the ## Benchmark section
const readme = fs.readFileSync(readmePath, "utf8"); const readme = fs.readFileSync(readmePath, "utf8");
const readmeLines = readme.split("\n"); const lines = readme.split("\n");
const benchIdx = readmeLines.findIndex(l => /^## Benchmark/.test(l)); const benchIdx = lines.findIndex((l) => /^## Benchmark/.test(l));
if (benchIdx === -1) { if (benchIdx === -1) {
console.error("Could not find '## Benchmark' section in README.md"); console.error("Could not find '## Benchmark' section in README.md");
process.exit(1); process.exit(1);
@@ -318,8 +366,8 @@ if (benchIdx === -1) {
let tableStart = -1; let tableStart = -1;
let tableEnd = -1; let tableEnd = -1;
for (let i = benchIdx + 1; i < readmeLines.length; i++) { for (let i = benchIdx + 1; i < lines.length; i += 1) {
if (readmeLines[i].startsWith("|")) { if (lines[i].startsWith("|")) {
if (tableStart === -1) tableStart = i; if (tableStart === -1) tableStart = i;
tableEnd = i; tableEnd = i;
} else if (tableStart !== -1) { } else if (tableStart !== -1) {
@@ -332,19 +380,19 @@ if (tableStart === -1) {
process.exit(1); process.exit(1);
} }
const before = readmeLines.slice(0, tableStart); const updated = [
const after = readmeLines.slice(tableEnd + 1); ...lines.slice(0, tableStart),
const updated = [...before, ...tableLines, ...after].join("\n"); ...tableLines,
...lines.slice(tableEnd + 1),
].join("\n");
fs.writeFileSync(readmePath, updated, "utf8"); fs.writeFileSync(readmePath, updated, "utf8");
console.log(" table updated (" + tableLines.length + " rows)"); console.log(` table updated (${tableLines.length} rows)`);
NODE NODE
# --- Done ---------------------------------------------------------------------
echo echo
echo "Benchmark rendering complete. Files updated:" echo "Benchmark rendering complete. Files updated:"
echo " $CHART_FILE" echo " $CHART_FILE"
echo " $ROOT_DIR/README.md" echo " $README_FILE"
echo echo
echo "Review with: git diff" echo "Review with: git diff"