grafana: more things
All checks were successful
Build and Deploy / deploy (push) Successful in 2m38s
All checks were successful
Build and Deploy / deploy (push) Successful in 2m38s
1. Smoothed out power draw — the UPS only reports in 9-watt increments, so smoothing gives more relative detail on trends. 2. Added Jellyfin integration — useful for seeing correlations between system statistics and Jellyfin streams. 3. Added Intel GPU stats — provides info on GPU utilization.
This commit is contained in:
@@ -48,6 +48,7 @@
|
|||||||
|
|
||||||
./services/ups.nix
|
./services/ups.nix
|
||||||
./services/monitoring.nix
|
./services/monitoring.nix
|
||||||
|
./services/jellyfin-annotations.nix
|
||||||
|
|
||||||
./services/bitwarden.nix
|
./services/bitwarden.nix
|
||||||
./services/firefox-syncserver.nix
|
./services/firefox-syncserver.nix
|
||||||
|
|||||||
78
services/intel-gpu-collector.py
Normal file
78
services/intel-gpu-collector.py
Normal file
@@ -0,0 +1,78 @@
|
|||||||
|
#!/usr/bin/env python3
|
||||||
|
import json
|
||||||
|
import os
|
||||||
|
import subprocess
|
||||||
|
import sys
|
||||||
|
import time
|
||||||
|
|
||||||
|
# Output path for the node_exporter textfile collector; overridable via the
# TEXTFILE environment variable (the systemd unit sets it to
# ${textfileDir}/intel-gpu.prom).
TEXTFILE = os.environ.get(
    "TEXTFILE",
    "/var/lib/prometheus-node-exporter-textfiles/intel-gpu.prom",
)
|
||||||
|
|
||||||
|
|
||||||
|
def read_one_sample():
    """Run intel_gpu_top once and return its first JSON sample as a dict.

    ``intel_gpu_top -J`` emits a stream of JSON objects (one per sample
    period); we scan the raw output for the first balanced ``{...}`` object
    and parse it.  Returns None if no complete object arrives before the
    5-second deadline.
    """
    import select  # local import: only this function polls the pipe

    proc = subprocess.Popen(
        ["intel_gpu_top", "-J", "-s", "1000"],  # -s 1000: 1000 ms sample period
        stdout=subprocess.PIPE,
        stderr=subprocess.DEVNULL,
    )
    fd = proc.stdout.fileno()
    buf = b""
    depth = 0       # current {...} nesting depth
    in_obj = False  # True once the opening '{' has been seen
    deadline = time.monotonic() + 5.0
    try:
        while True:
            remaining = deadline - time.monotonic()
            if remaining <= 0:
                break
            # The previous implementation called proc.stdout.read(1)
            # unconditionally, which blocks forever when intel_gpu_top emits
            # nothing, defeating the deadline check.  Poll the pipe first so
            # the timeout is actually honored.
            ready, _, _ = select.select([fd], [], [], remaining)
            if not ready:
                break
            byte = os.read(fd, 1)
            if not byte:
                break  # EOF: the collector exited
            if byte == b"{":
                in_obj = True
                depth += 1
            if in_obj:
                buf += byte
            if in_obj and byte == b"}":
                depth -= 1
                if depth == 0:
                    break  # first complete top-level object captured
        # NOTE: a brace inside a JSON string would miscount depth; the
        # intel_gpu_top output contains none, so this stays simple.
    finally:
        proc.terminate()
        proc.wait()
    if not buf:
        return None
    try:
        return json.loads(buf)
    except json.JSONDecodeError:
        # Deadline hit mid-object: treat the partial buffer as "no sample"
        # instead of crashing with an uncaught exception.
        return None
|
||||||
|
|
||||||
|
|
||||||
|
def write_metrics(sample):
    """Render one intel_gpu_top sample as Prometheus textfile metrics.

    The output is staged as ``<TEXTFILE>.tmp`` and renamed into place so
    node_exporter never observes a partially written file.
    """
    engine_lines = [
        f'intel_gpu_engine_busy_percent{{engine="{engine}"}} {data.get("busy", 0)}'
        for engine, data in sample.get("engines", {}).items()
    ]
    freq = sample.get("frequency", {})
    output = (
        [
            "# HELP intel_gpu_engine_busy_percent Intel GPU engine busy percentage",
            "# TYPE intel_gpu_engine_busy_percent gauge",
        ]
        + engine_lines
        + [
            "# HELP intel_gpu_frequency_mhz Intel GPU actual frequency in MHz",
            "# TYPE intel_gpu_frequency_mhz gauge",
            f'intel_gpu_frequency_mhz {freq.get("actual", 0)}',
            "# HELP intel_gpu_rc6_percent Intel GPU RC6 power-saving state percentage",
            "# TYPE intel_gpu_rc6_percent gauge",
            f'intel_gpu_rc6_percent {sample.get("rc6", {}).get("value", 0)}',
        ]
    )

    staging = TEXTFILE + ".tmp"
    with open(staging, "w") as f:
        f.write("\n".join(output) + "\n")
    os.replace(staging, TEXTFILE)  # atomic publish
|
||||||
|
|
||||||
|
|
||||||
|
def main():
    """Collect a single GPU sample and publish it; exit non-zero on failure."""
    sample = read_one_sample()
    if sample is not None:
        write_metrics(sample)
        return
    print("Failed to read intel_gpu_top sample", file=sys.stderr)
    sys.exit(1)


if __name__ == "__main__":
    main()
|
||||||
40
services/jellyfin-annotations.nix
Normal file
40
services/jellyfin-annotations.nix
Normal file
@@ -0,0 +1,40 @@
|
|||||||
|
# NixOS module: runs the Jellyfin -> Grafana stream-annotation daemon
# (jellyfin-annotations.py) as a hardened, long-running systemd service.
{
  config,
  pkgs,
  service_configs,
  lib,
  ...
}:
{
  systemd.services.jellyfin-annotations = {
    description = "Jellyfin stream annotation service for Grafana";
    after = [
      "network.target"
      "grafana.service"
    ];
    wantedBy = [ "multi-user.target" ];
    serviceConfig = {
      ExecStart = "${pkgs.python3}/bin/python3 ${./jellyfin-annotations.py}";
      Restart = "always";
      RestartSec = "10s";
      # Exposed to the script as $CREDENTIALS_DIRECTORY/jellyfin-api-key.
      LoadCredential = "jellyfin-api-key:${config.age.secrets.jellyfin-api-key.path}";
      DynamicUser = true;
      # Creates /var/lib/jellyfin-annotations, where state.json is persisted.
      StateDirectory = "jellyfin-annotations";
      # Hardening: the script only needs local TCP to Jellyfin and Grafana.
      NoNewPrivileges = true;
      ProtectSystem = "strict";
      ProtectHome = true;
      PrivateTmp = true;
      RestrictAddressFamilies = [
        "AF_INET"
        "AF_INET6"
      ];
      MemoryDenyWriteExecute = true;
    };
    # All knobs the script reads from the environment (see its module-level
    # constants); ports come from the shared service_configs registry.
    environment = {
      JELLYFIN_URL = "http://127.0.0.1:${toString service_configs.ports.private.jellyfin.port}";
      GRAFANA_URL = "http://127.0.0.1:${toString service_configs.ports.private.grafana.port}";
      STATE_FILE = "/var/lib/jellyfin-annotations/state.json";
      POLL_INTERVAL = "30";
    };
  };
}
|
||||||
233
services/jellyfin-annotations.py
Normal file
233
services/jellyfin-annotations.py
Normal file
@@ -0,0 +1,233 @@
|
|||||||
|
#!/usr/bin/env python3
|
||||||
|
import json
|
||||||
|
import os
|
||||||
|
import sys
|
||||||
|
import time
|
||||||
|
import urllib.request
|
||||||
|
from pathlib import Path
|
||||||
|
|
||||||
|
# Endpoints and tunables; every one is overridable from the systemd unit's
# environment (see services/jellyfin-annotations.nix).
JELLYFIN_URL = os.environ.get("JELLYFIN_URL", "http://127.0.0.1:8096")
GRAFANA_URL = os.environ.get("GRAFANA_URL", "http://127.0.0.1:3000")
STATE_FILE = os.environ.get("STATE_FILE", "/var/lib/jellyfin-annotations/state.json")
POLL_INTERVAL = int(os.environ.get("POLL_INTERVAL", "30"))  # seconds between polls
|
||||||
|
|
||||||
|
|
||||||
|
def get_api_key():
    """Locate and return the Jellyfin API key as a stripped string.

    Prefers systemd's LoadCredential directory ($CREDENTIALS_DIRECTORY),
    then falls back to the agenix secret path; exits the process when
    neither source is available.
    """
    cred_dir = os.environ.get("CREDENTIALS_DIRECTORY")
    if cred_dir:
        # Provided by systemd LoadCredential= -- trust it outright.
        return (Path(cred_dir) / "jellyfin-api-key").read_text().strip()
    fallback = Path("/run/agenix/jellyfin-api-key")
    if fallback.exists():
        return fallback.read_text().strip()
    sys.exit("ERROR: Cannot find jellyfin-api-key")
|
||||||
|
|
||||||
|
|
||||||
|
def http_json(method, url, body=None):
    """Issue an HTTP request with an optional JSON body; decode the JSON reply.

    Raises on network errors and non-2xx responses (urllib behavior); callers
    wrap this in try/except.
    """
    payload = None if body is None else json.dumps(body).encode()
    headers = {"Content-Type": "application/json", "Accept": "application/json"}
    request = urllib.request.Request(url, data=payload, headers=headers, method=method)
    with urllib.request.urlopen(request, timeout=5) as resp:
        return json.loads(resp.read())
|
||||||
|
|
||||||
|
|
||||||
|
def get_active_sessions(api_key):
    """Return Jellyfin sessions that are actively playing something.

    Returns None (not []) on any error so the caller can tell
    "Jellyfin unreachable" apart from "no active streams".
    """
    url = f"{JELLYFIN_URL}/Sessions?api_key={api_key}"
    try:
        request = urllib.request.Request(url, headers={"Accept": "application/json"})
        with urllib.request.urlopen(request, timeout=5) as resp:
            sessions = json.loads(resp.read())
        # Idle sessions (connected clients not playing) carry no NowPlayingItem.
        return [s for s in sessions if s.get("NowPlayingItem")]
    except Exception as e:
        print(f"Error fetching sessions: {e}", file=sys.stderr)
        return None
|
||||||
|
|
||||||
|
|
||||||
|
def _codec(name):
|
||||||
|
if not name:
|
||||||
|
return ""
|
||||||
|
aliases = {"h264": "H.264", "h265": "H.265", "hevc": "H.265", "av1": "AV1",
|
||||||
|
"vp9": "VP9", "vp8": "VP8", "mpeg4": "MPEG-4", "mpeg2video": "MPEG-2",
|
||||||
|
"aac": "AAC", "ac3": "AC3", "eac3": "EAC3", "dts": "DTS",
|
||||||
|
"truehd": "TrueHD", "mp3": "MP3", "opus": "Opus", "flac": "FLAC",
|
||||||
|
"vorbis": "Vorbis"}
|
||||||
|
return aliases.get(name.lower(), name.upper())
|
||||||
|
|
||||||
|
|
||||||
|
def _res(width, height):
|
||||||
|
if not height:
|
||||||
|
return ""
|
||||||
|
common = {2160: "4K", 1440: "1440p", 1080: "1080p", 720: "720p",
|
||||||
|
480: "480p", 360: "360p"}
|
||||||
|
return common.get(height, f"{height}p")
|
||||||
|
|
||||||
|
|
||||||
|
def _channels(n):
|
||||||
|
labels = {1: "Mono", 2: "Stereo", 6: "5.1", 7: "6.1", 8: "7.1"}
|
||||||
|
return labels.get(n, f"{n}ch") if n else ""
|
||||||
|
|
||||||
|
|
||||||
|
def format_label(session):
    """Build a multi-line Grafana annotation label for one Jellyfin session.

    Line 1: "<user>: <title>", line 2: " | "-joined stream details
    (play method, video, audio, bitrate, transcode reasons), optional
    line 3: "<client> · <device>".
    """
    user = session.get("UserName", "Unknown")
    # "or {}" guards: Jellyfin may send these keys with a null value.
    item = session.get("NowPlayingItem", {}) or {}
    transcode = session.get("TranscodingInfo") or {}
    play_state = session.get("PlayState") or {}
    client = session.get("Client", "")
    device = session.get("DeviceName", "")

    name = item.get("Name", "Unknown")
    series = item.get("SeriesName", "")
    season = item.get("ParentIndexNumber")
    episode = item.get("IndexNumber")
    media_type = item.get("Type", "")

    # Title: "Series S01E02 – Episode" > "Series – Episode" > "Movie (movie)" > bare name.
    # NOTE(review): a season/episode number of 0 is falsy and would drop the
    # SxxExx form -- presumably Jellyfin numbers are 1-based; confirm.
    if series and season and episode:
        title = f"{series} S{season:02d}E{episode:02d} \u2013 {name}"
    elif series:
        title = f"{series} \u2013 {name}"
    elif media_type == "Movie":
        title = f"{name} (movie)"
    else:
        title = name

    # Play method: trust PlayState, but any TranscodingInfo implies Transcode.
    play_method = play_state.get("PlayMethod", "")
    if play_method == "DirectPlay":
        method = "Direct Play"
    elif play_method == "DirectStream":
        method = "Direct Stream"
    elif play_method == "Transcode" or transcode:
        method = "Transcode"
    else:
        method = "Direct Play"

    # Pick the representative source streams: first video stream, and the
    # default audio stream (falling back to the first one).
    media_streams = item.get("MediaStreams") or []
    video_streams = [s for s in media_streams if s.get("Type") == "Video"]
    audio_streams = [s for s in media_streams if s.get("Type") == "Audio"]
    default_audio = next((s for s in audio_streams if s.get("IsDefault")), None)
    audio_stream = default_audio or (audio_streams[0] if audio_streams else {})
    video_stream = video_streams[0] if video_streams else {}

    src_vcodec = _codec(video_stream.get("Codec", ""))
    src_res = _res(video_stream.get("Width") or item.get("Width"),
                   video_stream.get("Height") or item.get("Height"))
    src_acodec = _codec(audio_stream.get("Codec", ""))
    src_channels = _channels(audio_stream.get("Channels"))

    # Default True: absent TranscodingInfo means nothing is being converted.
    is_video_direct = transcode.get("IsVideoDirect", True)
    is_audio_direct = transcode.get("IsAudioDirect", True)

    # Video part: show "src→dst" only when the codec actually changes.
    if transcode and not is_video_direct:
        dst_vcodec = _codec(transcode.get("VideoCodec", ""))
        dst_res = _res(transcode.get("Width"), transcode.get("Height")) or src_res
        if src_vcodec and dst_vcodec and src_vcodec != dst_vcodec:
            video_part = f"{src_vcodec}\u2192{dst_vcodec} {dst_res}".strip()
        else:
            video_part = f"{dst_vcodec or src_vcodec} {dst_res}".strip()
    else:
        video_part = f"{src_vcodec} {src_res}".strip()

    # Audio part: same "src→dst" convention as video.
    if transcode and not is_audio_direct:
        dst_acodec = _codec(transcode.get("AudioCodec", ""))
        dst_channels = _channels(transcode.get("AudioChannels")) or src_channels
        if src_acodec and dst_acodec and src_acodec != dst_acodec:
            audio_part = f"{src_acodec}\u2192{dst_acodec} {dst_channels}".strip()
        else:
            audio_part = f"{dst_acodec or src_acodec} {dst_channels}".strip()
    else:
        audio_part = f"{src_acodec} {src_channels}".strip()

    # Bitrate in Mbps; prefer the (lower) transcode output bitrate.
    bitrate = transcode.get("Bitrate") or item.get("Bitrate")
    bitrate_part = f"{bitrate / 1_000_000:.1f} Mbps" if bitrate else ""

    reasons = transcode.get("TranscodeReasons") or []
    reason_part = f"[{', '.join(reasons)}]" if reasons else ""

    stream_parts = [p for p in [method, video_part, audio_part, bitrate_part, reason_part] if p]
    client_str = " \u00b7 ".join(filter(None, [client, device]))

    lines = [f"{user}: {title}", " | ".join(stream_parts)]
    if client_str:
        lines.append(client_str)

    return "\n".join(lines)
|
||||||
|
|
||||||
|
|
||||||
|
def load_state():
    """Read persisted session->annotation state; {} if missing or corrupt."""
    try:
        with open(STATE_FILE) as fh:
            return json.load(fh)
    except (FileNotFoundError, json.JSONDecodeError):
        # First run, or a corrupt state file: start fresh.
        return {}
|
||||||
|
|
||||||
|
|
||||||
|
def save_state(state):
    """Persist state atomically: write a temp file, then rename into place."""
    os.makedirs(os.path.dirname(STATE_FILE), exist_ok=True)
    scratch = STATE_FILE + ".tmp"
    with open(scratch, "w") as fh:
        json.dump(state, fh)
    os.replace(scratch, STATE_FILE)
|
||||||
|
|
||||||
|
|
||||||
|
def grafana_post(label, start_ms):
    """Create a Grafana annotation tagged "jellyfin"; return its id or None."""
    payload = {"time": start_ms, "text": label, "tags": ["jellyfin"]}
    try:
        created = http_json("POST", f"{GRAFANA_URL}/api/annotations", payload)
        return created.get("id")
    except Exception as e:
        print(f"Error posting annotation: {e}", file=sys.stderr)
        return None
|
||||||
|
|
||||||
|
|
||||||
|
def grafana_close(grafana_id, end_ms):
    """Set timeEnd on an annotation, turning it into a region; best-effort."""
    url = f"{GRAFANA_URL}/api/annotations/{grafana_id}"
    try:
        http_json("PATCH", url, {"timeEnd": end_ms})
    except Exception as e:
        print(f"Error closing annotation {grafana_id}: {e}", file=sys.stderr)
|
||||||
|
|
||||||
|
|
||||||
|
def main():
    """Poll Jellyfin and mirror active streams as Grafana region annotations.

    Each new session Id opens an annotation; when the session disappears the
    annotation is closed (timeEnd set).  State is persisted so a restart does
    not re-create annotations for streams that are still running.
    """
    api_key = get_api_key()
    state = load_state()

    while True:
        now_ms = int(time.time() * 1000)  # Grafana expects epoch milliseconds
        sessions = get_active_sessions(api_key)

        # None means Jellyfin was unreachable: skip this cycle rather than
        # closing every open annotation on a transient blip.
        if sessions is not None:
            current_ids = {s["Id"] for s in sessions}

            # Open annotations for newly seen sessions.
            for s in sessions:
                sid = s["Id"]
                if sid not in state:
                    label = format_label(s)
                    grafana_id = grafana_post(label, now_ms)
                    # Only record on success; a failed POST retries next cycle.
                    if grafana_id is not None:
                        state[sid] = {
                            "grafana_id": grafana_id,
                            "label": label,
                            "start_ms": now_ms,
                        }
                        save_state(state)

            # Close annotations for sessions that ended (snapshot the keys
            # first -- we mutate state while iterating).  End time is only
            # accurate to POLL_INTERVAL granularity.
            for sid in [k for k in state if k not in current_ids]:
                info = state.pop(sid)
                grafana_close(info["grafana_id"], now_ms)
                save_state(state)

        time.sleep(POLL_INTERVAL)


if __name__ == "__main__":
    main()
|
||||||
@@ -38,6 +38,17 @@ let
|
|||||||
'';
|
'';
|
||||||
};
|
};
|
||||||
|
|
||||||
|
intelGpuCollector = pkgs.writeShellApplication {
|
||||||
|
name = "intel-gpu-collector";
|
||||||
|
runtimeInputs = with pkgs; [
|
||||||
|
python3
|
||||||
|
intel-gpu-tools
|
||||||
|
];
|
||||||
|
text = ''
|
||||||
|
exec python3 ${./intel-gpu-collector.py}
|
||||||
|
'';
|
||||||
|
};
|
||||||
|
|
||||||
dashboard = {
|
dashboard = {
|
||||||
editable = true;
|
editable = true;
|
||||||
graphTooltip = 1;
|
graphTooltip = 1;
|
||||||
@@ -54,6 +65,21 @@ let
|
|||||||
title = "System Overview";
|
title = "System Overview";
|
||||||
uid = "system-overview";
|
uid = "system-overview";
|
||||||
|
|
||||||
|
annotations.list = [
|
||||||
|
{
|
||||||
|
name = "Jellyfin Streams";
|
||||||
|
datasource = {
|
||||||
|
type = "grafana";
|
||||||
|
uid = "-- Grafana --";
|
||||||
|
};
|
||||||
|
enable = true;
|
||||||
|
iconColor = "green";
|
||||||
|
showIn = 0;
|
||||||
|
type = "tags";
|
||||||
|
tags = [ "jellyfin" ];
|
||||||
|
}
|
||||||
|
];
|
||||||
|
|
||||||
panels = [
|
panels = [
|
||||||
# -- Row 1: UPS --
|
# -- Row 1: UPS --
|
||||||
{
|
{
|
||||||
@@ -415,6 +441,42 @@ let
|
|||||||
graphMode = "area";
|
graphMode = "area";
|
||||||
};
|
};
|
||||||
}
|
}
|
||||||
|
|
||||||
|
# -- Row 3: Intel GPU --
|
||||||
|
{
|
||||||
|
id = 8;
|
||||||
|
type = "timeseries";
|
||||||
|
title = "Intel GPU Utilization";
|
||||||
|
gridPos = {
|
||||||
|
h = 8;
|
||||||
|
w = 24;
|
||||||
|
x = 0;
|
||||||
|
y = 16;
|
||||||
|
};
|
||||||
|
datasource = promDs;
|
||||||
|
targets = [
|
||||||
|
{
|
||||||
|
datasource = promDs;
|
||||||
|
expr = "intel_gpu_engine_busy_percent";
|
||||||
|
legendFormat = "{{engine}}";
|
||||||
|
refId = "A";
|
||||||
|
}
|
||||||
|
];
|
||||||
|
fieldConfig = {
|
||||||
|
defaults = {
|
||||||
|
unit = "percent";
|
||||||
|
min = 0;
|
||||||
|
max = 100;
|
||||||
|
color.mode = "palette-classic";
|
||||||
|
custom = {
|
||||||
|
lineWidth = 2;
|
||||||
|
fillOpacity = 10;
|
||||||
|
spanNulls = true;
|
||||||
|
};
|
||||||
|
};
|
||||||
|
overrides = [ ];
|
||||||
|
};
|
||||||
|
}
|
||||||
];
|
];
|
||||||
};
|
};
|
||||||
in
|
in
|
||||||
@@ -500,7 +562,6 @@ in
|
|||||||
root_url = "https://${service_configs.grafana.domain}";
|
root_url = "https://${service_configs.grafana.domain}";
|
||||||
};
|
};
|
||||||
|
|
||||||
# Caddy handles auth -- disable Grafana login entirely
|
|
||||||
"auth.anonymous" = {
|
"auth.anonymous" = {
|
||||||
enabled = true;
|
enabled = true;
|
||||||
org_role = "Admin";
|
org_role = "Admin";
|
||||||
@@ -539,21 +600,17 @@ in
|
|||||||
};
|
};
|
||||||
};
|
};
|
||||||
|
|
||||||
# Provision dashboard JSON
|
|
||||||
environment.etc."grafana-dashboards/system-overview.json" = {
|
environment.etc."grafana-dashboards/system-overview.json" = {
|
||||||
text = builtins.toJSON dashboard;
|
text = builtins.toJSON dashboard;
|
||||||
mode = "0444";
|
mode = "0444";
|
||||||
};
|
};
|
||||||
|
|
||||||
# Caddy reverse proxy with auth
|
|
||||||
services.caddy.virtualHosts."${service_configs.grafana.domain}".extraConfig = ''
|
services.caddy.virtualHosts."${service_configs.grafana.domain}".extraConfig = ''
|
||||||
import ${config.age.secrets.caddy_auth.path}
|
import ${config.age.secrets.caddy_auth.path}
|
||||||
reverse_proxy :${builtins.toString service_configs.ports.private.grafana.port}
|
reverse_proxy :${builtins.toString service_configs.ports.private.grafana.port}
|
||||||
'';
|
'';
|
||||||
|
|
||||||
# -- Jellyfin metrics collector --
|
# -- Jellyfin active-stream prometheus textfile collector --
|
||||||
# Queries the Jellyfin API for active streams and writes a .prom file
|
|
||||||
# for the node_exporter textfile collector.
|
|
||||||
systemd.services.jellyfin-metrics-collector = {
|
systemd.services.jellyfin-metrics-collector = {
|
||||||
description = "Collect Jellyfin metrics for Prometheus";
|
description = "Collect Jellyfin metrics for Prometheus";
|
||||||
after = [ "network.target" ];
|
after = [ "network.target" ];
|
||||||
@@ -572,7 +629,24 @@ in
|
|||||||
};
|
};
|
||||||
};
|
};
|
||||||
|
|
||||||
# Ensure textfile collector directory exists (tmpfs root -- recreated on boot)
|
# -- Intel GPU textfile collector --
|
||||||
|
systemd.services.intel-gpu-collector = {
|
||||||
|
description = "Collect Intel GPU metrics for Prometheus";
|
||||||
|
serviceConfig = {
|
||||||
|
Type = "oneshot";
|
||||||
|
ExecStart = lib.getExe intelGpuCollector;
|
||||||
|
};
|
||||||
|
environment.TEXTFILE = "${textfileDir}/intel-gpu.prom";
|
||||||
|
};
|
||||||
|
|
||||||
|
systemd.timers.intel-gpu-collector = {
|
||||||
|
wantedBy = [ "timers.target" ];
|
||||||
|
timerConfig = {
|
||||||
|
OnCalendar = "*:*:0/30";
|
||||||
|
RandomizedDelaySec = "10s";
|
||||||
|
};
|
||||||
|
};
|
||||||
|
|
||||||
systemd.tmpfiles.rules = [
|
systemd.tmpfiles.rules = [
|
||||||
"d ${textfileDir} 0755 root root -"
|
"d ${textfileDir} 0755 root root -"
|
||||||
];
|
];
|
||||||
|
|||||||
243
tests/jellyfin-annotations.nix
Normal file
243
tests/jellyfin-annotations.nix
Normal file
@@ -0,0 +1,243 @@
|
|||||||
|
# NixOS VM test for services/jellyfin-annotations.py: runs the script against
# mock Jellyfin/Grafana HTTP servers and asserts annotation lifecycle.
{
  lib,
  pkgs,
  ...
}:
let
  # A tiny stdlib-only HTTP server that impersonates either Jellyfin
  # (GET /Sessions, backed by a JSON file) or Grafana (POST/PATCH
  # /api/annotations, persisting to a JSON file) depending on argv[1].
  mockServer = pkgs.writeText "mock-server.py" ''
    import http.server, json, os, sys
    from urllib.parse import urlparse

    MODE = sys.argv[1]        # "jellyfin" or "grafana"
    PORT = int(sys.argv[2])
    DATA_FILE = sys.argv[3]   # JSON file backing the mock's state

    class Handler(http.server.BaseHTTPRequestHandler):
        def log_message(self, fmt, *args):
            pass  # keep the test log quiet

        def _read_body(self):
            length = int(self.headers.get("Content-Length", 0))
            return json.loads(self.rfile.read(length)) if length else {}

        def _json(self, code, body):
            data = json.dumps(body).encode()
            self.send_response(code)
            self.send_header("Content-Type", "application/json")
            self.end_headers()
            self.wfile.write(data)

        def do_GET(self):
            # Jellyfin mock: serve the sessions list from DATA_FILE.
            if MODE == "jellyfin" and self.path.startswith("/Sessions"):
                try:
                    with open(DATA_FILE) as f:
                        sessions = json.load(f)
                except Exception:
                    sessions = []
                self._json(200, sessions)
            else:
                self.send_response(404)
                self.end_headers()

        def do_POST(self):
            # Grafana mock: append the annotation, assigning a sequential id.
            if MODE == "grafana" and self.path == "/api/annotations":
                body = self._read_body()
                try:
                    with open(DATA_FILE) as f:
                        annotations = json.load(f)
                except Exception:
                    annotations = []
                aid = len(annotations) + 1
                body["id"] = aid
                annotations.append(body)
                with open(DATA_FILE, "w") as f:
                    json.dump(annotations, f)
                self._json(200, {"id": aid, "message": "Annotation added"})
            else:
                self.send_response(404)
                self.end_headers()

        def do_PATCH(self):
            # Grafana mock: merge the patch body into the matching annotation.
            if MODE == "grafana" and self.path.startswith("/api/annotations/"):
                aid = int(self.path.rsplit("/", 1)[-1])
                body = self._read_body()
                try:
                    with open(DATA_FILE) as f:
                        annotations = json.load(f)
                except Exception:
                    annotations = []
                for a in annotations:
                    if a["id"] == aid:
                        a.update(body)
                with open(DATA_FILE, "w") as f:
                    json.dump(annotations, f)
                self._json(200, {"message": "Annotation patched"})
            else:
                self.send_response(404)
                self.end_headers()

    http.server.HTTPServer(("127.0.0.1", PORT), Handler).serve_forever()
  '';

  script = ../services/jellyfin-annotations.py;
  python = pkgs.python3;
in
pkgs.testers.runNixOSTest {
  name = "jellyfin-annotations";

  nodes.machine =
    { pkgs, ... }:
    {
      environment.systemPackages = [ pkgs.python3 ];
    };

  testScript = ''
    import json
    import time

    JELLYFIN_PORT = 18096
    GRAFANA_PORT = 13000
    SESSIONS_FILE = "/tmp/sessions.json"
    ANNOTS_FILE = "/tmp/annotations.json"
    STATE_FILE = "/tmp/annotations-state.json"
    CREDS_DIR = "/tmp/test-creds"
    PYTHON = "${python}/bin/python3"
    MOCK = "${mockServer}"
    SCRIPT = "${script}"

    def read_annotations():
        # Annotations file may not exist yet; default to an empty list.
        out = machine.succeed(f"cat {ANNOTS_FILE} 2>/dev/null || echo '[]'")
        return json.loads(out.strip())

    start_all()
    machine.wait_for_unit("multi-user.target")

    with subtest("Setup mock credentials and data files"):
        machine.succeed(f"mkdir -p {CREDS_DIR} && echo 'fake-api-key' > {CREDS_DIR}/jellyfin-api-key")
        machine.succeed(f"echo '[]' > {SESSIONS_FILE}")
        machine.succeed(f"echo '[]' > {ANNOTS_FILE}")

    with subtest("Start mock Jellyfin and Grafana servers"):
        machine.succeed(
            f"systemd-run --unit=mock-jellyfin {PYTHON} {MOCK} jellyfin {JELLYFIN_PORT} {SESSIONS_FILE}"
        )
        machine.succeed(
            f"systemd-run --unit=mock-grafana {PYTHON} {MOCK} grafana {GRAFANA_PORT} {ANNOTS_FILE}"
        )
        machine.wait_until_succeeds(
            f"curl -sf http://127.0.0.1:{JELLYFIN_PORT}/Sessions", timeout=10
        )
        # Probe Grafana with a throwaway annotation, then reset the file.
        machine.wait_until_succeeds(
            f"curl -sf -X POST http://127.0.0.1:{GRAFANA_PORT}/api/annotations "
            f"-H 'Content-Type: application/json' -d '{{\"text\":\"ping\",\"tags\":[]}}' | grep -q id",
            timeout=10,
        )
        machine.succeed(f"echo '[]' > {ANNOTS_FILE}")

    with subtest("Start annotation service pointing at mock servers"):
        machine.succeed(
            f"systemd-run --unit=annotations-svc "
            f"--setenv=JELLYFIN_URL=http://127.0.0.1:{JELLYFIN_PORT} "
            f"--setenv=GRAFANA_URL=http://127.0.0.1:{GRAFANA_PORT} "
            f"--setenv=CREDENTIALS_DIRECTORY={CREDS_DIR} "
            f"--setenv=STATE_FILE={STATE_FILE} "
            f"--setenv=POLL_INTERVAL=3 "
            f"{PYTHON} {SCRIPT}"
        )
        time.sleep(2)

    with subtest("No annotations pushed when no streams active"):
        time.sleep(4)
        annots = read_annotations()
        assert annots == [], f"Expected no annotations, got: {annots}"

    with subtest("Annotation created when stream starts"):
        # A session exercising the full label: transcoded audio, direct video.
        rich_session = json.dumps([{
            "Id": "sess-1",
            "UserName": "alice",
            "Client": "Infuse",
            "DeviceName": "iPhone",
            "PlayState": {"PlayMethod": "Transcode"},
            "NowPlayingItem": {
                "Name": "Inception",
                "Type": "Movie",
                "Bitrate": 20000000,
                "MediaStreams": [
                    {"Type": "Video", "Codec": "h264", "Width": 1920, "Height": 1080},
                    {"Type": "Audio", "Codec": "dts", "Channels": 6, "IsDefault": True},
                ],
            },
            "TranscodingInfo": {
                "IsVideoDirect": True,
                "IsAudioDirect": False,
                "VideoCodec": "h264",
                "AudioCodec": "aac",
                "AudioChannels": 2,
                "Bitrate": 8000000,
                "TranscodeReasons": ["AudioCodecNotSupported"],
            },
        }])
        machine.succeed(f"echo {repr(rich_session)} > {SESSIONS_FILE}")
        machine.wait_until_succeeds(
            f"cat {ANNOTS_FILE} | python3 -c \"import sys,json; a=json.load(sys.stdin); exit(0 if a else 1)\"",
            timeout=15,
        )
        annots = read_annotations()
        assert len(annots) == 1, f"Expected 1 annotation, got: {annots}"
        text = annots[0]["text"]
        assert "alice: Inception (movie)" in text, f"Missing title in: {text}"
        assert "Transcode" in text, f"Missing method in: {text}"
        assert "H.264" in text, f"Missing video codec in: {text}"
        assert "DTS" in text and "AAC" in text, f"Missing audio codec in: {text}"
        assert "8.0 Mbps" in text, f"Missing bitrate in: {text}"
        assert "AudioCodecNotSupported" in text, f"Missing transcode reason in: {text}"
        assert "Infuse" in text and "iPhone" in text, f"Missing client in: {text}"
        assert "jellyfin" in annots[0].get("tags", []), f"Missing jellyfin tag: {annots[0]}"
        assert "timeEnd" not in annots[0], f"timeEnd should not be set yet: {annots[0]}"

    with subtest("Annotation closed when stream ends"):
        machine.succeed(f"echo '[]' > {SESSIONS_FILE}")
        machine.wait_until_succeeds(
            f"cat {ANNOTS_FILE} | python3 -c \"import sys,json; a=json.load(sys.stdin); exit(0 if a and 'timeEnd' in a[0] else 1)\"",
            timeout=15,
        )
        annots = read_annotations()
        assert len(annots) == 1, f"Expected 1 annotation, got: {annots}"
        assert "timeEnd" in annots[0], f"timeEnd should be set: {annots[0]}"
        assert annots[0]["timeEnd"] > annots[0]["time"], "timeEnd should be after time"

    with subtest("Multiple concurrent streams each get their own annotation"):
        machine.succeed(f"echo '[]' > {ANNOTS_FILE}")
        machine.succeed(
            f"""echo '[
            {{"Id":"sess-2","UserName":"bob","NowPlayingItem":{{"Name":"Breaking Bad","SeriesName":"Breaking Bad","ParentIndexNumber":1,"IndexNumber":1}}}},
            {{"Id":"sess-3","UserName":"carol","NowPlayingItem":{{"Name":"Inception","Type":"Movie"}}}}
            ]' > {SESSIONS_FILE}"""
        )
        machine.wait_until_succeeds(
            f"cat {ANNOTS_FILE} | python3 -c \"import sys,json; a=json.load(sys.stdin); exit(0 if len(a)==2 else 1)\"",
            timeout=15,
        )
        annots = read_annotations()
        assert len(annots) == 2, f"Expected 2 annotations, got: {annots}"
        texts = sorted(a["text"] for a in annots)
        assert any("Breaking Bad" in t and "S01E01" in t for t in texts), f"Missing Bob's annotation: {texts}"
        assert any("carol" in t and "Inception" in t for t in texts), f"Missing Carol's annotation: {texts}"

    with subtest("State survives service restart (no duplicate annotations)"):
        machine.succeed("systemctl stop annotations-svc || true")
        time.sleep(1)
        machine.succeed(
            f"systemd-run --unit=annotations-svc-2 "
            f"--setenv=JELLYFIN_URL=http://127.0.0.1:{JELLYFIN_PORT} "
            f"--setenv=GRAFANA_URL=http://127.0.0.1:{GRAFANA_PORT} "
            f"--setenv=CREDENTIALS_DIRECTORY={CREDS_DIR} "
            f"--setenv=STATE_FILE={STATE_FILE} "
            f"--setenv=POLL_INTERVAL=3 "
            f"{PYTHON} {SCRIPT}"
        )
        time.sleep(6)
        annots = read_annotations()
        assert len(annots) == 2, f"Restart should not create duplicates, got: {annots}"
  '';
}
|
||||||
@@ -22,6 +22,9 @@ in
|
|||||||
fail2banImmichTest = handleTest ./fail2ban-immich.nix;
|
fail2banImmichTest = handleTest ./fail2ban-immich.nix;
|
||||||
fail2banJellyfinTest = handleTest ./fail2ban-jellyfin.nix;
|
fail2banJellyfinTest = handleTest ./fail2ban-jellyfin.nix;
|
||||||
|
|
||||||
|
# jellyfin annotation service test
|
||||||
|
jellyfinAnnotationsTest = handleTest ./jellyfin-annotations.nix;
|
||||||
|
|
||||||
# ntfy alerts test
|
# ntfy alerts test
|
||||||
ntfyAlertsTest = handleTest ./ntfy-alerts.nix;
|
ntfyAlertsTest = handleTest ./ntfy-alerts.nix;
|
||||||
|
|
||||||
|
|||||||
Reference in New Issue
Block a user