Skip to content

Instantly share code, notes, and snippets.

@m0veax
Last active February 6, 2026 13:06
Show Gist options
  • Select an option

  • Save m0veax/5c0d6b5097e631112934c0684d441a6b to your computer and use it in GitHub Desktop.

Select an option

Save m0veax/5c0d6b5097e631112934c0684d441a6b to your computer and use it in GitHub Desktop.
[vibecoded] netzbremse json prometheus exporter
{
"id": null,
"uid": "speedtest-live",
"title": "Speedtest Live Metrics",
"timezone": "",
"version": 1,
"schemaVersion": 38,
"refresh": "30s",
"tags": ["speedtest", "network", "prometheus"],
"panels": [
{
"type": "timeseries",
"title": "Download (MBit/s)",
"id": 1,
"gridPos": { "h": 8, "w": 12, "x": 0, "y": 0 },
"datasource": "$datasource",
"fieldConfig": {
"defaults": { "unit": "Mbits/s", "decimals": 2 }
},
"targets": [
{
"refId": "A",
"expr": "speedtest_download{endpoint=~\"$endpoint\"} / 1e6",
"legendFormat": "{{endpoint}}"
}
]
},
{
"type": "timeseries",
"title": "Upload (MBit/s)",
"id": 2,
"gridPos": { "h": 8, "w": 12, "x": 12, "y": 0 },
"datasource": "$datasource",
"fieldConfig": {
"defaults": { "unit": "Mbits/s", "decimals": 2 }
},
"targets": [
{
"refId": "A",
"expr": "speedtest_upload{endpoint=~\"$endpoint\"} / 1e6",
"legendFormat": "{{endpoint}}"
}
]
},
{
"type": "timeseries",
"title": "Latency (ms)",
"id": 3,
"gridPos": { "h": 8, "w": 12, "x": 0, "y": 8 },
"datasource": "$datasource",
"fieldConfig": { "defaults": { "unit": "ms" } },
"targets": [
{
"refId": "A",
"expr": "speedtest_latency{endpoint=~\"$endpoint\"}",
"legendFormat": "{{endpoint}}"
}
]
},
{
"type": "timeseries",
"title": "Jitter (ms)",
"id": 4,
"gridPos": { "h": 8, "w": 12, "x": 12, "y": 8 },
"datasource": "$datasource",
"fieldConfig": { "defaults": { "unit": "ms" } },
"targets": [
{
"refId": "A",
"expr": "speedtest_jitter{endpoint=~\"$endpoint\"}",
"legendFormat": "{{endpoint}}"
}
]
},
{
"type": "timeseries",
"title": "Loaded Latency Down/Up (ms)",
"id": 5,
"gridPos": { "h": 8, "w": 12, "x": 0, "y": 16 },
"datasource": "$datasource",
"targets": [
{
"refId": "A",
"expr": "speedtest_loaded_latency_down{endpoint=~\"$endpoint\"}",
"legendFormat": "down {{endpoint}}"
},
{
"refId": "B",
"expr": "speedtest_loaded_latency_up{endpoint=~\"$endpoint\"}",
"legendFormat": "up {{endpoint}}"
}
]
},
{
"type": "timeseries",
"title": "Loaded Jitter Down/Up (ms)",
"id": 6,
"gridPos": { "h": 8, "w": 12, "x": 12, "y": 16 },
"datasource": "$datasource",
"targets": [
{
"refId": "A",
"expr": "speedtest_loaded_jitter_down{endpoint=~\"$endpoint\"}",
"legendFormat": "down {{endpoint}}"
},
{
"refId": "B",
"expr": "speedtest_loaded_jitter_up{endpoint=~\"$endpoint\"}",
"legendFormat": "up {{endpoint}}"
}
]
}
],
"templating": {
"list": [
{
"name": "datasource",
"type": "datasource",
"query": "prometheus",
"current": {}
},
{
"name": "endpoint",
"type": "query",
"datasource": "$datasource",
"refresh": 1,
"multi": true,
"includeAll": true,
"query": "label_values(speedtest_download, endpoint)"
}
]
}
}
services:
speedtest:
image: ghcr.io/akvorrat/netzbremse-measurement:latest
container_name: netzbremse-speedtest
restart: unless-stopped
environment:
# TODO: Read the privacy policy at https://netzbremse.de/speed and
# Cloudflare terms at https://www.cloudflare.com/privacypolicy/
# Set to "true" to accept the policies
NB_SPEEDTEST_ACCEPT_POLICY: "true"
NB_SPEEDTEST_JSON_OUT_DIR: './results' # Optional: Stores results as JSON files in a local folder (required for data visualization)
volumes:
- ./results/:/app/results/
deploy: # Optional: Sensible resource limits, remove this section if not supported by your platform
resources:
limits:
memory: 1G
cpus: '1'
speedtest-exporter:
build:
context: .
dockerfile: ExporterDockerfile
container_name: speedtest-exporter
ports:
- "3100:8080"
volumes:
- ./results:/data
restart: unless-stopped
#!/usr/bin/env python3
import json
import os
import re
import datetime
from http.server import BaseHTTPRequestHandler, HTTPServer
DATA_DIR = "/data"
MAX_FILE_AGE_DAYS = 7
FILENAME_REGEX = r"speedtest-(\d{4}-\d{2}-\d{2})T(\d{2})-(\d{2})-(\d{2})-(\d{3})Z\.json"


def parse_timestamp(filename):
    """Extract the UTC measurement time encoded in a result filename.

    Filenames look like ``speedtest-YYYY-MM-DDTHH-MM-SS-mmmZ.json``.
    Returns a timezone-aware ``datetime`` (UTC), or ``None`` when the
    name does not match the expected pattern.
    """
    match = re.match(FILENAME_REGEX, filename)
    if match is None:
        return None
    date_part, hour, minute, second, millis = match.groups()
    naive = datetime.datetime.strptime(
        f"{date_part} {hour}:{minute}:{second}.{millis}",
        "%Y-%m-%d %H:%M:%S.%f",
    )
    return naive.replace(tzinfo=datetime.timezone.utc)
def delete_old_files():
    """Delete result files in DATA_DIR older than MAX_FILE_AGE_DAYS.

    Files whose names don't match the expected timestamp pattern are
    left untouched. A file that disappears between listing and removal
    (e.g. deleted by a concurrent scrape) is silently ignored instead of
    failing the whole cleanup.
    """
    now = datetime.datetime.now(datetime.timezone.utc)
    threshold = now - datetime.timedelta(days=MAX_FILE_AGE_DAYS)
    for filename in os.listdir(DATA_DIR):
        ts = parse_timestamp(filename)
        if ts is None:
            continue
        if ts < threshold:
            try:
                os.remove(os.path.join(DATA_DIR, filename))
            except FileNotFoundError:
                # Lost a race with another deleter; nothing left to do.
                pass
def load_measurements():
    """Yield ``(timestamp, parsed_json)`` for every valid result file in DATA_DIR.

    Skips files that don't end in ``.json``, don't match the timestamp
    naming pattern, can't be read, or contain invalid JSON. The original
    bare ``except:`` also swallowed KeyboardInterrupt/SystemExit; only
    I/O and JSON decoding errors are expected here, so catch just those.
    """
    for filename in os.listdir(DATA_DIR):
        if not filename.endswith(".json"):
            continue
        ts = parse_timestamp(filename)
        if not ts:
            continue
        path = os.path.join(DATA_DIR, filename)
        try:
            with open(path, "r") as f:
                data = json.load(f)
            yield ts, data
        except (OSError, json.JSONDecodeError):
            # Unreadable or malformed file: skip it, serve the rest.
            continue
def format_prometheus_line(metric, labels, value, timestamp):
    """Render one sample line in the Prometheus text exposition format.

    ``metric`` is the metric name, ``labels`` a dict of label name ->
    value, ``value`` the sample value, and ``timestamp`` an aware
    datetime; the timestamp is emitted in milliseconds as the format
    requires. Label values are escaped per the exposition format spec
    (backslash, double quote and newline), which the original omitted —
    a quote or newline in an endpoint name would have produced an
    unparsable scrape.
    """

    def _escape(v):
        return (
            str(v)
            .replace("\\", "\\\\")
            .replace('"', '\\"')
            .replace("\n", "\\n")
        )

    label_str = ",".join(f'{k}="{_escape(v)}"' for k, v in labels.items())
    ts_ms = int(timestamp.timestamp() * 1000)
    return f"{metric}{{{label_str}}} {value} {ts_ms}"
def to_prometheus_output():
    """Build the complete /metrics payload from all stored measurements.

    Prunes expired result files first, then emits one gauge sample per
    metric per measurement, each stamped with the measurement's own
    timestamp. An entry missing any expected key is skipped so a single
    malformed result file cannot break the entire scrape (previously a
    KeyError would propagate and abort the response).
    """
    delete_old_files()
    lines = [
        "# TYPE speedtest_download gauge",
        "# TYPE speedtest_upload gauge",
        "# TYPE speedtest_latency gauge",
        "# TYPE speedtest_jitter gauge",
        "# TYPE speedtest_loaded_latency_down gauge",
        "# TYPE speedtest_loaded_jitter_down gauge",
        "# TYPE speedtest_loaded_latency_up gauge",
        "# TYPE speedtest_loaded_jitter_up gauge"
    ]
    for ts, entry in load_measurements():
        try:
            labels = {"endpoint": entry["endpoint"]}
            r = entry["result"]
            metrics = {
                "speedtest_download": r["download"],
                "speedtest_upload": r["upload"],
                "speedtest_latency": r["latency"],
                "speedtest_jitter": r["jitter"],
                "speedtest_loaded_latency_down": r["downLoadedLatency"],
                "speedtest_loaded_jitter_down": r["downLoadedJitter"],
                "speedtest_loaded_latency_up": r["upLoadedLatency"],
                "speedtest_loaded_jitter_up": r["upLoadedJitter"],
            }
        except KeyError:
            # Unexpected schema in this result file: serve the rest.
            continue
        for metric, value in metrics.items():
            lines.append(
                format_prometheus_line(metric, labels, value, ts)
            )
    return "\n".join(lines) + "\n"
class Handler(BaseHTTPRequestHandler):
    """Minimal HTTP handler exposing the exporter's /metrics endpoint."""

    def do_GET(self):
        """Answer GET /metrics with the exposition payload; 404 anything else."""
        if self.path == "/metrics":
            body = to_prometheus_output().encode()
            self.send_response(200)
            self.send_header("Content-Type", "text/plain; version=0.0.4")
            self.end_headers()
            self.wfile.write(body)
        else:
            self.send_response(404)
            self.end_headers()
def run():
    """Serve the exporter on all interfaces, port 8080, until interrupted."""
    HTTPServer(("0.0.0.0", 8080), Handler).serve_forever()


if __name__ == "__main__":
    run()
FROM python:3.11-slim
WORKDIR /app
COPY exporter.py /app/exporter.py
VOLUME ["/data"]
EXPOSE 8080
# Health check: verify that /metrics responds.
# python:3.11-slim ships neither wget nor curl, so the original
# `wget -qO- ...` probe always failed; use the Python stdlib instead.
HEALTHCHECK --interval=30s --timeout=5s --start-period=10s --retries=3 \
  CMD python3 -c "import urllib.request; urllib.request.urlopen('http://localhost:8080/metrics', timeout=4)" || exit 1
CMD ["python3", "/app/exporter.py"]
Sign up for free to join this conversation on GitHub. Already have an account? Sign in to comment