name: Gateway Perf (Create 50 APIs + TPS)

on:
  workflow_dispatch:
  # schedule:
  #   - cron: "0 20 * * *" # daily 20:00 UTC (adjust)

jobs:
  perf:
    runs-on: ubuntu-latest
    timeout-minutes: 60
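    # Assumption: the commit/PR steps at the end need a writable GITHUB_TOKEN.
    # If the repository default is read-only, grant it explicitly:
    # permissions:
    #   contents: write
    #   pull-requests: write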
    steps:
      - name: Checkout
        uses: actions/checkout@v4
      - name: Install tools (JMeter + jq)
        run: |
          sudo apt-get update
          sudo apt-get install -y jq
          # Install JMeter
          JMETER_VER=5.6.3
          curl -L -o apache-jmeter.tgz "https://archive.apache.org/dist/jmeter/binaries/apache-jmeter-${JMETER_VER}.tgz"
          tar -xzf apache-jmeter.tgz
          echo "$PWD/apache-jmeter-${JMETER_VER}/bin" >> $GITHUB_PATH
      - name: Verify config.toml is a file
        run: |
          set -e
          ls -la gateway/configs
          test -f gateway/configs/config.toml
          echo "✅ gateway/configs/config.toml exists and is a file"
      - name: Start gateway stack
        run: |
          cd gateway
          ls -la
          docker compose -f docker-compose-perf.yaml up -d
          docker compose -f docker-compose-perf.yaml ps
      - name: Wait for gateway-controller /health
        run: |
          set -euo pipefail
          COMPOSE_FILE="gateway/docker-compose-perf.yaml"
          SVC="gateway-controller"
          URL="http://127.0.0.1:9090/health"
          echo "Waiting for $URL ..."
          for i in $(seq 1 90); do
            # show container status every 10 tries
            if (( i % 10 == 1 )); then
              docker compose -f "$COMPOSE_FILE" ps || true
            fi
            code="$(curl -sS -o /tmp/health.json -w "%{http_code}" "$URL" || true)"
            if [[ "$code" == "200" ]]; then
              echo "✅ Healthy:"
              cat /tmp/health.json
              exit 0
            fi
            echo "Attempt $i/90: not ready (HTTP $code)"
            sleep 2
          done
          echo "❌ Health check failed"
          echo "--- last controller logs ---"
          docker compose -f "$COMPOSE_FILE" logs --no-color --tail=200 "$SVC" || true
          exit 1
      - name: Debug netty port
        run: |
          docker ps --format "table {{.Names}}\t{{.Status}}\t{{.Ports}}"
          docker compose -f gateway/docker-compose-perf.yaml port netty 8085 || true
          curl -v http://127.0.0.1:8085/ || true
      - name: Create 50 APIs + collect docker stats CSV
        run: |
          chmod +x gateway/perf/create_apis_and_capture_stats.sh
          # Create 50 APIs and write stats.csv
          gateway/perf/create_apis_and_capture_stats.sh
          tail -n 5 stats.csv || true
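          # The graphing step below expects stats.csv to contain the columns
          # api_count, router_mem_used, controller_mem_used and policy_mem_used
          # (tab- or comma-delimited), with memory values such as "123.4MiB"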
      - name: Verify JMX exists
        run: |
          ls -la gateway/perf
          test -f gateway/perf/weather_perf_random_50.jmx
          echo "✅ JMX file exists"
      - name: Run JMeter TPS test (random across 50 APIs)
        env:
          # 1 GiB heap for the JMeter JVM (exported as HEAP below)
          JMETER_HEAP: "-Xms1g -Xmx1g"
        run: |
          # Raise the file-descriptor limit for high concurrency. Best-effort:
          # hosted runners may not allow much; use a self-hosted runner for
          # real perf numbers.
          ulimit -n 100000 || true
          export HEAP="${JMETER_HEAP}"
          # Non-GUI run
          jmeter -n \
            -t gateway/perf/weather_perf_random_50.jmx \
            -l perf.jtl \
            -j jmeter.log
          # Quick TPS summary from the JTL, printed to the job log. Assumes the
          # standard JTL CSV layout: timeStamp,elapsed,...,success in column 8.
          awk -F',' '
            NR==1{next}
            {count++; if($8=="false")err++; sum+=$2; ts=$1;
             if(min==0||ts<min)min=ts; if(ts>max)max=ts}
            END{
              dur=(max-min)/1000.0;
              tps=(dur>0)?count/dur:0;
              avg=(count>0)?sum/count:0;
              errpct=(count>0)?(err*100.0/count):0;
              printf "Requests=%d Duration=%.1fs TPS=%.2f AvgMs=%.1f ErrPct=%.2f\n", count, dur, tps, avg, errpct
            }' perf.jtl || true
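          # Optional: JMeter can also render its full HTML dashboard from the
          # same results file (the output directory must not already exist):
          #   jmeter -g perf.jtl -o perf-report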
      - name: Docker stats snapshot
        if: always()
        run: |
          docker stats --no-stream || true
          docker compose -f gateway/docker-compose-perf.yaml logs --no-color | tail -n 200 || true
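          # A narrower snapshot, if the full stats table is too noisy:
          #   docker stats --no-stream --format "table {{.Name}}\t{{.MemUsage}}\t{{.CPUPerc}}"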
      - uses: actions/setup-python@v5
        with:
          python-version: "3.11"
      - name: Install graph dependencies
        run: |
          python -m pip install --upgrade pip
          pip install matplotlib
      - name: Generate performance graphs
        run: |
          mkdir -p docs/performance
          python3 << 'EOF'
          import csv
          import matplotlib.pyplot as plt
          from collections import defaultdict

          # ---------- Memory graphs ----------
          api, router_mem, controller_mem, policy_mem = [], [], [], []

          def mib(v):
              # docker stats prints values like "123.4MiB" or "1.2GiB"; normalize to MiB
              v = v.strip()
              if v.endswith("GiB"):
                  return float(v[:-3]) * 1024
              return float(v.replace("MiB", ""))

          with open("stats.csv") as f:
              sample = f.read(1024)
              delim = "\t" if "\t" in sample else ","
              f.seek(0)
              r = csv.DictReader(f, delimiter=delim)
              for row in r:
                  api.append(int(row["api_count"]))
                  router_mem.append(mib(row["router_mem_used"]))
                  controller_mem.append(mib(row["controller_mem_used"]))
                  policy_mem.append(mib(row["policy_mem_used"]))
          def plot(x, y, title, out):
              plt.figure(figsize=(9, 4))
              plt.plot(x, y, marker="o")
              plt.xlabel("API Count")
              plt.ylabel("Memory (MiB)")
              plt.title(title)
              plt.grid(True)
              plt.tight_layout()
              plt.savefig(out)
              plt.close()

          plot(api, router_mem, "Router Memory vs API Count",
               "docs/performance/router_memory_vs_api_count.png")
          plot(api, controller_mem, "Controller Memory vs API Count",
               "docs/performance/controller_memory_vs_api_count.png")
          plot(api, policy_mem, "Policy Engine Memory vs API Count",
               "docs/performance/policy_memory_vs_api_count.png")
          # ---------- TPS over time ----------
          buckets = defaultdict(int)
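          # 1-second buckets: every sample (pass or fail) in that second
          # counts toward its TPS value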
with open("perf.jtl") as f:
r = csv.DictReader(f)
for row in r:
ts = int(row["timeStamp"]) // 1000
buckets[ts] += 1
times = sorted(buckets)
t0 = times[0]
x = [t - t0 for t in times]
y = [buckets[t] for t in times]
plt.figure(figsize=(10,4))
plt.plot(x, y)
plt.xlabel("Time since start (s)")
plt.ylabel("TPS")
plt.title("TPS over Time")
plt.grid(True)
plt.tight_layout()
plt.savefig("docs/performance/tps_over_time.png")
plt.close()
EOF
      - name: Cleanup JMeter install
        run: |
          # Keep perf.jtl and jmeter.log: the final upload step still needs them
          rm -rf apache-jmeter-*
          rm -f apache-jmeter.tgz
          git status --short || true
      - name: Commit performance docs
        run: |
          git config user.name "github-actions[bot]"
          git config user.email "github-actions[bot]@users.noreply.github.com"
          git add docs/performance
          git commit -m "Add gateway performance graphs" || echo "Nothing to commit"
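      # peter-evans/create-pull-request branches from the current HEAD, so the
      # commit above is carried into the PR branch along with any uncommitted
      # changes matched by add-paths.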
      - name: Create PR (docs only)
        uses: peter-evans/create-pull-request@v6
        with:
          title: "Add gateway performance results"
          body: |
            This PR adds automatically generated performance graphs:
            - Memory vs API count
            - TPS over time
          branch: perf-results
          add-paths: |
            docs/performance/**
      - name: Upload performance graphs
        uses: actions/upload-artifact@v4
        with:
          name: api-scale-performance-graphs
          path: |
            docs/performance/router_memory_vs_api_count.png
            docs/performance/controller_memory_vs_api_count.png
            docs/performance/policy_memory_vs_api_count.png
            docs/performance/tps_over_time.png
      - name: Upload artifacts (CSV + JTL + logs)
        if: always()
        uses: actions/upload-artifact@v4
        with:
          name: perf-results
          path: |
            stats.csv
            perf.jtl
            jmeter.log