# Workflow file captured from the run view of PR #130 ("Update SimpleWServer").
# GitHub web-UI chrome removed; indentation reconstructed below.
name: Probe

# Manual dispatch runs every server; PRs against main run only affected ones
# (see the "Detect changes" step).
on:
  workflow_dispatch:
  pull_request:
    branches: [ main ]
jobs:
  probe:
    name: Compliance Probe
    runs-on: ubuntu-latest
    permissions:
      contents: write   # push to the latest-results branch
      actions: write    # trigger the docs deploy workflow via `gh workflow run`
    steps:
      # fetch-depth: 0 — full history is required both by the
      # `git diff origin/<base>...HEAD` change detection and by the
      # latest-results worktree push at the end of the job.
      - name: Checkout
        uses: actions/checkout@v4
        with:
          fetch-depth: 0
- name: Discover servers
id: discover
run: |
SERVERS='[]'
for f in src/Servers/*/probe.json; do
dir=$(basename "$(dirname "$f")")
name=$(jq -r .name "$f")
lang=$(jq -r '.language // ""' "$f")
SERVERS=$(echo "$SERVERS" | jq -c --arg d "$dir" --arg n "$name" --arg l "$lang" '. + [{"dir": $d, "name": $n, "language": $l}]')
done
echo "servers=$SERVERS" >> "$GITHUB_OUTPUT"
echo "Discovered: $(echo "$SERVERS" | jq -r '.[].name' | tr '\n' ', ')"
- name: Detect changes
id: changes
run: |
SERVERS='${{ steps.discover.outputs.servers }}'
set_all() {
echo "servers=$SERVERS" >> "$GITHUB_OUTPUT"
}
# workflow_dispatch always runs everything
if [ "${{ github.event_name }}" = "workflow_dispatch" ]; then
set_all
exit 0
fi
CHANGED=$(git diff --name-only origin/${{ github.base_ref }}...HEAD)
# Global triggers → run all
if echo "$CHANGED" | grep -qE '^(src/Http11Probe/|src/Http11Probe\.Cli/|Directory\.Build\.props|\.dockerignore|\.github/workflows/probe\.yml)'; then
set_all
exit 0
fi
AFFECTED='[]'
for row in $(echo "$SERVERS" | jq -r '.[] | @base64'); do
dir=$(echo "$row" | base64 -d | jq -r '.dir')
name=$(echo "$row" | base64 -d | jq -r '.name')
lang=$(echo "$row" | base64 -d | jq -r '.language')
if echo "$CHANGED" | grep -q "^src/Servers/${dir}/"; then
AFFECTED=$(echo "$AFFECTED" | jq -c --arg d "$dir" --arg n "$name" --arg l "$lang" '. + [{"dir": $d, "name": $n, "language": $l}]')
fi
done
echo "servers=$AFFECTED" >> "$GITHUB_OUTPUT"
- name: Setup .NET
if: steps.changes.outputs.servers != '[]'
uses: actions/setup-dotnet@v4
with:
dotnet-version: '10.0'
- name: Build probe CLI
if: steps.changes.outputs.servers != '[]'
run: dotnet build Http11Probe.slnx -c Release
# ── Build / Run / Probe / Kill — one server at a time ──────────
- name: Probe servers
if: steps.changes.outputs.servers != '[]'
run: |
SERVERS='${{ steps.changes.outputs.servers }}'
PROBE_PORT=8080
for row in $(echo "$SERVERS" | jq -r '.[] | @base64'); do
dir=$(echo "$row" | base64 -d | jq -r '.dir')
name=$(echo "$row" | base64 -d | jq -r '.name')
tag=$(echo "probe-$dir" | tr '[:upper:]' '[:lower:]')
echo "::group::$name"
# Build
docker build -t "$tag" -f "src/Servers/$dir/Dockerfile" .
# Run
docker run -d --name probe-target --network host "$tag"
# Wait
for i in $(seq 1 30); do
curl -sf "http://localhost:${PROBE_PORT}/" > /dev/null 2>&1 && break
sleep 1
done
# Probe
dotnet run --no-build -c Release --project src/Http11Probe.Cli -- \
--host localhost --port "$PROBE_PORT" --output "probe-${dir}.json" || true
# Kill
docker stop probe-target && docker rm probe-target
echo "::endgroup::"
done
- name: Cleanup
if: always()
run: docker rm -f probe-target 2>/dev/null || true
# ── Process results ────────────────────────────────────────────
- name: Process results
if: steps.changes.outputs.servers != '[]'
env:
PROBE_SERVERS: ${{ steps.changes.outputs.servers }}
run: |
python3 << 'PYEOF'
import json, sys, os, subprocess, pathlib
# ── Pass through CLI verdicts (evaluation now lives in C#) ──
def evaluate(raw):
results = []
for r in raw['results']:
status = r.get('statusCode')
conn = r.get('connectionState', '')
got = str(status) if status is not None else conn
expected = r.get('expected', '?')
verdict = r['verdict']
scored = r.get('scored', True)
reason = r['description']
if verdict == 'Fail':
reason = f"Expected {expected}, got {got} — {reason}"
results.append({
'id': r['id'], 'description': r['description'],
'category': r['category'], 'rfc': r.get('rfcReference'),
'verdict': verdict, 'statusCode': status,
'expected': expected, 'got': got,
'connectionState': conn, 'reason': reason,
'scored': scored,
'rfcLevel': r.get('rfcLevel', 'Must'),
'durationMs': r.get('durationMs', 0),
'rawRequest': r.get('rawRequest'),
'rawResponse': r.get('rawResponse'),
'behavioralNote': r.get('behavioralNote'),
'doubleFlush': r.get('doubleFlush'),
})
scored_results = [r for r in results if r['scored']]
scored_pass = sum(1 for r in scored_results if r['verdict'] == 'Pass')
scored_fail = sum(1 for r in scored_results if r['verdict'] == 'Fail')
scored_warn = sum(1 for r in scored_results if r['verdict'] == 'Warn')
unscored = sum(1 for r in results if not r['scored'])
return {
'summary': {'total': len(results), 'scored': len(scored_results), 'passed': scored_pass, 'failed': scored_fail, 'warnings': scored_warn, 'unscored': unscored},
'results': results,
}
# ── Process each server ──────────────────────────────────────
servers_config = json.loads(os.environ['PROBE_SERVERS'])
SERVERS = [(s['name'], f"probe-{s['dir']}.json", s.get('language', '')) for s in servers_config]
commit_id = subprocess.check_output(['git', 'rev-parse', 'HEAD']).decode().strip()
commit_msg = subprocess.check_output(['git', 'log', '-1', '--format=%s']).decode().strip()
commit_time = subprocess.check_output(['git', 'log', '-1', '--format=%cI']).decode().strip()
server_data = []
for name, path, language in SERVERS:
p = pathlib.Path(path)
if not p.exists():
print(f'::warning::{name}: result file {path} not found, skipping')
continue
with open(path) as f:
raw = json.load(f)
ev = evaluate(raw)
ev['name'] = name
ev['language'] = language
server_data.append(ev)
s = ev['summary']
print(f"{name}: {s['passed']}/{s['scored']} passed, {s['failed']} failed, {s['warnings']} warnings")
if not server_data:
print('::warning::No probe results found — nothing to report')
sys.exit(0)
# ── Baseline gate ─────────────────────────────────────────────
BASELINE_TESTS = {'COMP-BASELINE', 'COMP-POST-CL-BODY'}
baseline_failures = []
for sv in server_data:
for r in sv['results']:
if r['id'] in BASELINE_TESTS and r['verdict'] == 'Fail':
baseline_failures.append(f"{sv['name']}: {r['id']} — got {r['got']}")
if baseline_failures:
for f in baseline_failures:
print(f'::error::{f}')
# ── Write data.js ────────────────────────────────────────────
output = {
'commit': {'id': commit_id, 'message': commit_msg, 'timestamp': commit_time},
'servers': server_data,
}
with open('probe-data.js', 'w') as f:
f.write('window.PROBE_DATA = ' + json.dumps(output) + ';')
# ── Write PR comment ─────────────────────────────────────────
lines = ['<!-- http11probe-results -->', '## Http11Probe — Compliance Comparison', '']
# Summary table with bars
max_scored = max(s['summary']['scored'] for s in server_data)
BAR_WIDTH = 20
lines.append('| Server | Score | |')
lines.append('|--------|------:|---|')
for sv in sorted(server_data, key=lambda s: s['summary']['passed'] + s['summary']['warnings'], reverse=True):
s = sv['summary']
score = s['passed'] + s['warnings']
pct = score / s['scored'] if s['scored'] else 0
filled = round(pct * BAR_WIDTH)
bar = '\u2588' * filled + '\u2591' * (BAR_WIDTH - filled)
lines.append(f"| **{sv['name']}** | {score}/{s['scored']} | `{bar}` {pct:.0%} |")
lines.append('')
# Collect all test IDs in order from first server
test_ids = [r['id'] for r in server_data[0]['results']] if server_data else []
# Build lookup: server_name -> {test_id -> result}
lookup = {}
for sv in server_data:
lookup[sv['name']] = {r['id']: r for r in sv['results']}
names = [sv['name'] for sv in server_data]
import re
def short(tid):
return re.sub(r'^(RFC\d+-[\d.]+-|COMP-|SMUG-|MAL-|NORM-)', '', tid)
# Baseline status
if baseline_failures:
lines.append('### ❌ Baseline Failed')
lines.append('')
for bf in baseline_failures:
lines.append(f'- `{bf}`')
lines.append('')
else:
lines.append('### ✅ Baseline Passed')
lines.append('')
for cat_name, title in [('Compliance', 'Compliance'), ('Smuggling', 'Smuggling'), ('MalformedInput', 'Malformed Input'), ('Normalization', 'Header Normalization')]:
cat_tests = [tid for tid in test_ids if lookup[names[0]][tid]['category'] == cat_name]
if not cat_tests:
continue
lines.append(f'### {title}')
lines.append('')
# Header row: Test | Expected | Server1 | Server2 | ...
hdr = '| Test | Expected | ' + ' | '.join(f'**{n}**' for n in names) + ' |'
sep = '|---|---' + ''.join('|:---:' for _ in names) + '|'
lines.append(hdr)
lines.append(sep)
# One row per test
for tid in cat_tests:
first = lookup[names[0]][tid]
expected = first['expected']
cells = []
for n in names:
r = lookup[n].get(tid)
if not r:
cells.append('—')
else:
icon = '✅' if r['verdict'] == 'Pass' else ('⚠️' if r['verdict'] == 'Warn' else '❌')
cells.append(f"{icon}`{r['got']}`")
lines.append(f"| `{short(tid)}` | {expected} | " + ' | '.join(cells) + ' |')
lines.append('')
lines.append(f"<sub>Commit: {commit_id[:7]}</sub>")
with open('probe-comment.md', 'w') as f:
f.write('\n'.join(lines))
if baseline_failures:
print('::warning::Baseline failures detected — results will still be published')
PYEOF
# ── Upload / publish ───────────────────────────────────────────
- name: Upload results
uses: actions/upload-artifact@v4
if: always()
with:
name: probe-results
path: probe-*.json
- name: Save PR metadata
if: github.event_name == 'pull_request' && steps.changes.outputs.servers != '[]'
run: echo '${{ github.event.number }}' > pr-number.txt
- name: Upload PR comment
if: always() && github.event_name == 'pull_request' && steps.changes.outputs.servers != '[]'
uses: actions/upload-artifact@v4
with:
name: probe-pr-comment
path: |
probe-comment.md
pr-number.txt
- name: Push to latest-results
if: github.event_name == 'workflow_dispatch' && github.ref == 'refs/heads/main'
run: |
git config user.name "github-actions[bot]"
git config user.email "41898282+github-actions[bot]@users.noreply.github.com"
if git fetch origin latest-results 2>/dev/null; then
git worktree add /tmp/latest-results origin/latest-results
else
git worktree add --detach /tmp/latest-results HEAD
git -C /tmp/latest-results switch --orphan latest-results
fi
mkdir -p /tmp/latest-results/probe
cp probe-data.js /tmp/latest-results/probe/data.js
cd /tmp/latest-results
git add probe/data.js
if git diff --cached --quiet; then
echo "No changes to commit."
else
git commit -m "Update probe results"
git push origin HEAD:latest-results
fi
cd -
git worktree remove /tmp/latest-results || true
- name: Rebuild docs
if: github.event_name == 'workflow_dispatch' && github.ref == 'refs/heads/main'
run: gh workflow run "Deploy Docs to GitHub Pages"
env:
GH_TOKEN: ${{ secrets.GITHUB_TOKEN }}