torrent-audit: init
This commit is contained in:
@@ -40,6 +40,7 @@
|
||||
./services/arr/jellyseerr.nix
|
||||
./services/arr/recyclarr.nix
|
||||
./services/arr/arr-search.nix
|
||||
./services/arr/torrent-audit.nix
|
||||
./services/arr/init.nix
|
||||
|
||||
./services/soulseek.nix
|
||||
|
||||
40
services/arr/torrent-audit.nix
Normal file
40
services/arr/torrent-audit.nix
Normal file
@@ -0,0 +1,40 @@
|
||||
{
  pkgs,
  config,
  service_configs,
  ...
}:
{
  # One-shot auditing job: cross-references qBittorrent against Radarr/Sonarr
  # and prints a report of unmanaged / abandoned torrents (see torrent-audit.py).
  systemd.services.torrent-audit = {
    description = "Audit qBittorrent for unmanaged and abandoned upgrade torrents";
    # Ordered after the services it queries so their APIs are up when triggered
    # at boot; "after" alone does not pull them in.
    after = [
      "network-online.target"
      "sonarr.service"
      "radarr.service"
      "qbittorrent.service"
    ];
    wants = [ "network-online.target" ];

    serviceConfig = {
      Type = "oneshot";
      # NOTE(review): the "+" ExecStart prefix runs the command with full
      # privileges per systemd.service(5) — presumably needed to reach the
      # qBittorrent VPN namespace; confirm.
      ExecStart = "+${
        pkgs.python3.withPackages (
          ps: with ps; [
            pyarr
            qbittorrent-api
          ]
        )
      }/bin/python ${./torrent-audit.py}";
      TimeoutSec = 300;
    };

    # Environment consumed by torrent-audit.py (URLs, config.xml paths for
    # API keys, and the qBittorrent categories to audit).
    environment = {
      QBITTORRENT_URL = "http://${config.vpnNamespaces.wg.namespaceAddress}:${builtins.toString service_configs.ports.private.torrent.port}";
      RADARR_URL = "http://localhost:${builtins.toString service_configs.ports.private.radarr.port}";
      RADARR_CONFIG = "${service_configs.radarr.dataDir}/config.xml";
      SONARR_URL = "http://localhost:${builtins.toString service_configs.ports.private.sonarr.port}";
      SONARR_CONFIG = "${service_configs.sonarr.dataDir}/config.xml";
      CATEGORIES = "tvshows,movies,anime";
    };
  };
}
|
||||
333
services/arr/torrent-audit.py
Normal file
333
services/arr/torrent-audit.py
Normal file
@@ -0,0 +1,333 @@
|
||||
#!/usr/bin/env python3
|
||||
"""
|
||||
Audit qBittorrent torrents against Radarr/Sonarr.
|
||||
|
||||
Reports two categories:
|
||||
|
||||
UNMANAGED -- torrents in qBittorrent that no *arr service has ever touched.
|
||||
These were added manually or by some other tool.
|
||||
|
||||
ABANDONED -- torrents that *arr grabbed but later replaced with a better
|
||||
version. The old torrent is still seeding while the library
|
||||
points to the new one.
|
||||
|
||||
Abandoned detection uses API cross-referencing (not filesystem hardlinks) and
|
||||
verifies against the *arr's current file state:
|
||||
|
||||
1. HISTORY -- group imports by content unit (movieId / episodeId); the
|
||||
most recent import is the keeper, older ones are candidates.
|
||||
2. CURRENT -- verify against the *arr's active file mapping.
|
||||
"""
|
||||
|
||||
import logging
|
||||
import os
|
||||
import sys
|
||||
from collections import defaultdict
|
||||
from xml.etree import ElementTree
|
||||
|
||||
import qbittorrentapi
|
||||
from pyarr import RadarrAPI, SonarrAPI
|
||||
|
||||
logging.basicConfig(
|
||||
level=logging.INFO,
|
||||
format="%(asctime)s %(levelname)s %(message)s",
|
||||
stream=sys.stderr,
|
||||
)
|
||||
log = logging.getLogger(__name__)
|
||||
|
||||
|
||||
def get_api_key(config_path: str) -> str:
    """Extract the *arr API key from its ``config.xml``.

    Parameters:
        config_path: Path to the service's ``config.xml``.

    Returns:
        The text of the first ``<ApiKey>`` element.

    Raises:
        ValueError: If no non-empty ``<ApiKey>`` element exists.  The
            original code dereferenced ``find(...).text`` directly, which
            raised an opaque ``AttributeError`` on a missing element.
    """
    tree = ElementTree.parse(config_path)
    node = tree.find(".//ApiKey")
    if node is None or not node.text:
        raise ValueError(f"no <ApiKey> element found in {config_path}")
    return node.text
|
||||
|
||||
|
||||
def paginate(arr_client, endpoint: str, page_size: int = 1000):
    """Yield every record from a paged *arr endpoint.

    Resolves ``arr_client.get_<endpoint>`` and keeps requesting successive
    pages until ``totalRecords`` is exhausted.
    """
    fetch = getattr(arr_client, f"get_{endpoint}")
    page_number = 1
    while True:
        payload = fetch(page=page_number, page_size=page_size)
        yield from payload["records"]
        # Stop once the pages requested so far cover every record.
        if page_number * page_size >= payload["totalRecords"]:
            return
        page_number += 1
|
||||
|
||||
|
||||
def get_qbit_torrents(qbit_client, category: str) -> dict[str, dict]:
    """Return all torrents in *category*, keyed by uppercase info-hash."""
    by_hash: dict[str, dict] = {}
    for entry in qbit_client.torrents_info(category=category):
        by_hash[entry["hash"].upper()] = entry
    return by_hash
|
||||
|
||||
|
||||
def gib(size_bytes: int) -> str:
    """Format a byte count as GiB with one decimal place (no unit suffix)."""
    gibibytes = size_bytes / 2**30
    return f"{gibibytes:.1f}"
|
||||
|
||||
|
||||
# ---------------------------------------------------------------------------
|
||||
# Collect all known hashes from *arr history + queue
|
||||
# ---------------------------------------------------------------------------
|
||||
|
||||
|
||||
def collect_all_known_hashes(arr_client, page_size: int = 1000) -> set[str]:
    """Gather every torrent hash the *arr has ever referenced.

    Walks both the queue (in-flight grabs) and the full history, and
    normalises each ``downloadId`` to uppercase.  Records without a
    ``downloadId`` are skipped.
    """
    known: set[str] = set()
    for source in ("queue", "history"):
        for record in paginate(arr_client, source, page_size):
            download_id = (record.get("downloadId") or "").upper()
            if download_id:
                known.add(download_id)
    return known
|
||||
|
||||
|
||||
# ---------------------------------------------------------------------------
|
||||
# Unmanaged: torrents with hashes not in any *arr history/queue
|
||||
# ---------------------------------------------------------------------------
|
||||
|
||||
|
||||
def find_unmanaged(qbit_torrents: dict, known_hashes: set) -> list[dict]:
    """Return torrents whose hash appears in no *arr history/queue.

    Result is sorted oldest-added first.
    """
    orphans = [
        torrent
        for info_hash, torrent in qbit_torrents.items()
        if info_hash not in known_hashes
    ]
    orphans.sort(key=lambda t: t["added_on"])
    return orphans
|
||||
|
||||
|
||||
# ---------------------------------------------------------------------------
|
||||
# Abandoned movies: group imports by movieId, older = abandoned
|
||||
# ---------------------------------------------------------------------------
|
||||
|
||||
|
||||
def find_movie_abandoned(radarr, qbit_movies):
    """Find movie torrents Radarr upgraded away from but which still seed.

    For every movie, group ``downloadFolderImported`` history events by
    ``movieId``: the most recent import is the keeper, older imports are
    abandonment candidates.  Candidates are then matched against the live
    qBittorrent state (*qbit_movies*, keyed by uppercase hash) and checked
    against Radarr's current file to assign a SAFE/REVIEW status.
    """
    log.info("Analysing Radarr import history ...")
    imports_by_movie = defaultdict(list)
    for rec in paginate(radarr, "history"):
        # Only actual imports count; "grabbed" etc. are ignored.
        if rec.get("eventType") != "downloadFolderImported":
            continue
        did = (rec.get("downloadId") or "").upper()
        if not did:
            continue
        mid = rec.get("movieId")
        if not mid:
            continue
        imports_by_movie[mid].append(
            {"downloadId": did, "date": rec["date"]}
        )

    # Identify keeper (latest) and abandoned (older) hashes per movie.
    abandoned_hashes: set[str] = set()
    keeper_hashes: set[str] = set()
    hash_to_movie: dict[str, int] = {}

    for mid, events in imports_by_movie.items():
        # NOTE: dates are compared as strings — assumes ISO-8601 timestamps
        # from the Radarr API, where lexical order equals chronological order.
        ordered = sorted(events, key=lambda e: e["date"])
        keeper_hashes.add(ordered[-1]["downloadId"])
        for e in ordered[:-1]:
            abandoned_hashes.add(e["downloadId"])
            hash_to_movie[e["downloadId"]] = mid

    # A hash that is a keeper for *any* movie must not be deleted.
    abandoned_hashes -= keeper_hashes

    log.info("Fetching Radarr current movie state ...")
    radarr_movies = {m["id"]: m for m in radarr.get_movie()}

    results = []
    for ahash in abandoned_hashes:
        torrent = qbit_movies.get(ahash)
        if torrent is None:
            # Already deleted from qBittorrent — nothing to report.
            continue

        mid = hash_to_movie.get(ahash)
        movie = radarr_movies.get(mid) if mid else None
        mf = (movie or {}).get("movieFile") or {}

        current_quality = (mf.get("quality") or {}).get("quality", {}).get("name", "?")
        current_size = mf.get("size", 0)

        status = "SAFE"
        notes = []

        if not movie or not movie.get("hasFile"):
            # The replacement is gone too (movie removed or file missing):
            # deleting the old torrent would lose the only copy, so flag it.
            notes.append("movie removed or has no file in Radarr")
            status = "REVIEW"
        elif torrent["size"] > current_size * 1.05:
            # 5% tolerance: an "abandoned" torrent noticeably larger than the
            # current file may actually be the better release — needs a human.
            notes.append(
                f"abandoned is larger than current "
                f"({gib(torrent['size'])} > {gib(current_size)} GiB)"
            )
            status = "REVIEW"

        results.append(
            {
                "name": torrent["name"],
                "size": torrent["size"],
                "state": torrent["state"],
                "hash": torrent["hash"],
                "added_on": torrent["added_on"],
                "status": status,
                "notes": notes,
                "current_quality": current_quality,
            }
        )

    return sorted(results, key=lambda r: r["added_on"])
|
||||
|
||||
|
||||
# ---------------------------------------------------------------------------
|
||||
# Abandoned TV: group imports by episodeId, a hash is abandoned only when
|
||||
# it is NOT the latest import for ANY episode it covers.
|
||||
# ---------------------------------------------------------------------------
|
||||
|
||||
|
||||
def find_tv_abandoned(sonarr, qbit_tvshows):
    """Find TV torrents Sonarr upgraded away from but which still seed.

    Unlike movies, one torrent (a season pack) can cover many episodes, so
    a hash is abandoned only when it is NOT the latest import for ANY
    episode it was ever imported for.  *qbit_tvshows* is the live
    qBittorrent state keyed by uppercase hash.
    """
    log.info("Analysing Sonarr import history ...")
    episode_imports = defaultdict(list)
    all_download_ids: set[str] = set()
    hash_to_series: dict[str, int] = {}

    for rec in paginate(sonarr, "history"):
        # Only actual imports count; "grabbed" etc. are ignored.
        if rec.get("eventType") != "downloadFolderImported":
            continue
        did = (rec.get("downloadId") or "").upper()
        eid = rec.get("episodeId")
        if not did or not eid:
            continue
        episode_imports[eid].append({"downloadId": did, "date": rec["date"]})
        all_download_ids.add(did)
        sid = rec.get("seriesId")
        if sid:
            hash_to_series[did] = sid

    # A hash is "active" if it is the latest import for *any* episode.
    # NOTE: dates compared as strings — assumes ISO-8601 timestamps from the
    # Sonarr API, where lexical order equals chronological order.
    active_hashes: set[str] = set()
    for events in episode_imports.values():
        latest = max(events, key=lambda e: e["date"])
        active_hashes.add(latest["downloadId"])

    abandoned_hashes = all_download_ids - active_hashes

    log.info("Fetching Sonarr current series state ...")
    current_series = {s["id"] for s in sonarr.get_series()}

    results = []
    for ahash in abandoned_hashes:
        torrent = qbit_tvshows.get(ahash)
        if torrent is None:
            # Already deleted from qBittorrent — nothing to report.
            continue

        status = "SAFE"
        notes = []
        sid = hash_to_series.get(ahash)
        if sid and sid not in current_series:
            # Series no longer in Sonarr: the replacement may be gone too,
            # so defer to a human instead of marking SAFE.
            notes.append("series removed from Sonarr")
            status = "REVIEW"

        results.append(
            {
                "name": torrent["name"],
                "size": torrent["size"],
                "state": torrent["state"],
                "hash": torrent["hash"],
                "added_on": torrent["added_on"],
                "status": status,
                "notes": notes,
            }
        )

    return sorted(results, key=lambda r: r["added_on"])
|
||||
|
||||
|
||||
# ---------------------------------------------------------------------------
|
||||
# Report
|
||||
# ---------------------------------------------------------------------------
|
||||
|
||||
|
||||
def print_section(torrents, show_status=False):
    """Pretty-print one report section to stdout.

    Each torrent gets a name line, a size/state line, and its notes; a
    totals line closes the section.  With *show_status* the per-torrent
    SAFE/REVIEW tag is shown and the totals are broken down by status.
    """
    if not torrents:
        print(" (none)\n")
        return

    combined_size = sum(entry["size"] for entry in torrents)
    for entry in torrents:
        if show_status:
            prefix = f"[{entry['status']:6s}] "
        else:
            prefix = " "
        pad = " " * len(prefix)
        print(f" {prefix}{entry['name']}")
        detail = f"{gib(entry['size'])} GiB | {entry['state']}"
        print(f" {pad}{detail}")
        for remark in entry.get("notes", []):
            print(f" {pad}** {remark}")
        print()

    if show_status:
        safe = [entry for entry in torrents if entry["status"] == "SAFE"]
        review = [entry for entry in torrents if entry["status"] == "REVIEW"]
        print(
            f" total={len(torrents)} ({gib(combined_size)} GiB) | "
            f"safe={len(safe)} | review={len(review)}"
        )
    else:
        print(f" total={len(torrents)} ({gib(combined_size)} GiB)")
    print()
|
||||
|
||||
|
||||
def main():
    """Entry point: connect to all three services, then print the report.

    Configuration comes entirely from environment variables (set by the
    systemd unit): service URLs, config.xml paths for API keys, and the
    comma-separated qBittorrent categories to audit.  Raises KeyError if
    a required variable is missing.
    """
    qbit_url = os.environ["QBITTORRENT_URL"]
    radarr_url = os.environ["RADARR_URL"]
    radarr_config = os.environ["RADARR_CONFIG"]
    sonarr_url = os.environ["SONARR_URL"]
    sonarr_config = os.environ["SONARR_CONFIG"]
    categories = os.environ.get("CATEGORIES", "tvshows,movies,anime").split(",")

    # API keys are read straight from each service's config.xml.
    radarr_key = get_api_key(radarr_config)
    sonarr_key = get_api_key(sonarr_config)

    radarr = RadarrAPI(radarr_url, radarr_key)
    sonarr = SonarrAPI(sonarr_url, sonarr_key)
    # NOTE(review): no credentials passed — assumes qBittorrent allows
    # unauthenticated localhost/whitelisted access; confirm.
    qbit = qbittorrentapi.Client(host=qbit_url)

    log.info("Getting qBittorrent state ...")
    # One hash->torrent dict per configured category.
    qbit_torrents = {cat: get_qbit_torrents(qbit, cat) for cat in categories}
    for cat, torrents in qbit_torrents.items():
        log.info(" %s: %d torrents", cat, len(torrents))

    log.info("Collecting known hashes from Sonarr ...")
    sonarr_hashes = collect_all_known_hashes(sonarr)
    log.info(" %d unique hashes", len(sonarr_hashes))

    log.info("Collecting known hashes from Radarr ...")
    radarr_hashes = collect_all_known_hashes(radarr)
    log.info(" %d unique hashes", len(radarr_hashes))

    # Union: a torrent is "managed" if either service has seen its hash.
    all_known = sonarr_hashes | radarr_hashes

    # -- Unmanaged --
    print("\n========== UNMANAGED TORRENTS ==========\n")
    for cat in categories:
        unmanaged = find_unmanaged(qbit_torrents[cat], all_known)
        print(f"--- {cat} ({len(unmanaged)} unmanaged / {len(qbit_torrents[cat])} total) ---\n")
        print_section(unmanaged)

    # -- Abandoned --
    print("========== ABANDONED UPGRADE LEFTOVERS ==========\n")

    # Abandoned detection is per-service: Radarr only sees "movies",
    # Sonarr only sees "tvshows" (other categories get {} and are skipped).
    movie_abandoned = find_movie_abandoned(
        radarr, qbit_torrents.get("movies", {})
    )
    print(f"--- movies ({len(movie_abandoned)} abandoned) ---\n")
    print_section(movie_abandoned, show_status=True)

    tv_abandoned = find_tv_abandoned(
        sonarr, qbit_torrents.get("tvshows", {})
    )
    print(f"--- tvshows ({len(tv_abandoned)} abandoned) ---\n")
    print_section(tv_abandoned, show_status=True)

    # -- Summary --
    all_abandoned = movie_abandoned + tv_abandoned
    safe = [t for t in all_abandoned if t["status"] == "SAFE"]

    print("=" * 50)
    print(
        f"ABANDONED: {len(all_abandoned)} total ({len(safe)} safe to delete)"
    )
    print(f"SAFE TO RECLAIM: {gib(sum(t['size'] for t in safe))} GiB")


if __name__ == "__main__":
    main()
|
||||
@@ -24,4 +24,7 @@ in
|
||||
|
||||
# ntfy alerts test
|
||||
ntfyAlertsTest = handleTest ./ntfy-alerts.nix;
|
||||
|
||||
# torrent audit test
|
||||
torrentAuditTest = handleTest ./torrent-audit.nix;
|
||||
}
|
||||
|
||||
422
tests/torrent-audit.nix
Normal file
422
tests/torrent-audit.nix
Normal file
@@ -0,0 +1,422 @@
|
||||
{
  config,
  lib,
  pkgs,
  ...
}:
let
  # Ports the three mock HTTP servers listen on inside the test VM
  # (offset from the real defaults to make collisions obvious).
  qbitPort = 18080;
  radarrPort = 17878;
  sonarrPort = 18989;

  # Minimal config.xml files so the audit script's get_api_key() succeeds.
  radarrConfig = pkgs.writeText "radarr-config.xml" ''
    <Config><ApiKey>test-radarr-key</ApiKey></Config>
  '';

  sonarrConfig = pkgs.writeText "sonarr-config.xml" ''
    <Config><ApiKey>test-sonarr-key</ApiKey></Config>
  '';

  # Interpreter with the same client libraries the production service uses.
  python = "${
    pkgs.python3.withPackages (ps: [
      ps.pyarr
      ps.qbittorrent-api
    ])
  }/bin/python3";
  auditScript = ../services/arr/torrent-audit.py;

  # Single mock API server script -- accepts SERVICE and PORT as CLI args.
  # Routes responses based on SERVICE type (qbit / radarr / sonarr).
  mockScript = pkgs.writeText "mock-api-server.py" ''
    import json
    import sys
    from http.server import HTTPServer, BaseHTTPRequestHandler
    from urllib.parse import urlparse, parse_qs

    SERVICE = sys.argv[1]
    PORT = int(sys.argv[2])

    # ── Hash constants (uppercase, 40 hex chars) ──────────────────────────
    # Movies
    UNMANAGED_MOV = "A" * 38 + "01"
    MANAGED_MOV = "A" * 38 + "02"
    OLD_MOV = "A" * 38 + "03"  # movieId=2, older import → abandoned SAFE
    NEW_MOV = "A" * 38 + "04"  # movieId=2, newer import → keeper
    KEEPER_CROSS = "A" * 38 + "05"  # keeper for movieId=3, old for movieId=4
    KEEPER3_OLD = "A" * 38 + "0B"  # movieId=3, older import (not in qBit)
    KEEPER4_NEW = "A" * 38 + "06"  # movieId=4, newer import → keeper
    REMOVED_OLD = "A" * 38 + "07"  # movieId=5, older import (movie removed)
    REMOVED_NEW = "A" * 38 + "08"  # movieId=5, newer import → keeper (not in qBit)
    LARGER_OLD = "A" * 38 + "09"  # movieId=6, older import (larger than current)
    LARGER_NEW = "A" * 38 + "0A"  # movieId=6, newer import → keeper
    SINGLE_CROSS = "A" * 38 + "0C"  # movieId=7 single import AND older import for movieId=8
    SINGLE8_NEW = "A" * 38 + "0D"  # movieId=8, newer import → keeper (not in qBit)
    QUEUED_MOV = "A" * 38 + "0E"  # in Radarr queue, not in history

    # TV
    UNMANAGED_TV = "B" * 38 + "01"
    MANAGED_TV = "B" * 38 + "02"  # episodeId=100, single import
    OLD_TV = "B" * 38 + "03"  # episodeId=200, older import → abandoned SAFE
    NEW_TV = "B" * 38 + "04"  # episodeId=200, newer import → active
    SEASON_PACK = "B" * 38 + "05"  # episodeIds 300,301,302 (still active for 301,302)
    REPACK = "B" * 38 + "06"  # episodeId=300, newer import → active
    REMOVED_TV = "B" * 38 + "07"  # episodeId=400, older import (series removed)
    REMOVED_TV_NEW = "B" * 38 + "08"  # episodeId=400, newer import (not in qBit)

    def make_torrent(h, name, size, added_on, state="uploading"):
        return {
            "hash": h.lower(),
            "name": name,
            "size": size,
            "state": state,
            "added_on": added_on,
            "content_path": f"/downloads/{name}",
        }

    QBIT_DATA = {
        "movies": [
            make_torrent(UNMANAGED_MOV, "Unmanaged.Movie.2024", 5_000_000_000, 1704067200),
            make_torrent(MANAGED_MOV, "Managed.Movie.2024", 4_000_000_000, 1704067201),
            make_torrent(OLD_MOV, "Old.Movie.Quality.2024", 3_000_000_000, 1704067202),
            make_torrent(NEW_MOV, "New.Movie.Quality.2024", 6_000_000_000, 1704067203),
            make_torrent(KEEPER_CROSS, "CrossRef.Movie.2024", 4_500_000_000, 1704067204),
            make_torrent(REMOVED_OLD, "Removed.Movie.2024", 3_500_000_000, 1704067205),
            make_torrent(LARGER_OLD, "Larger.Movie.2024", 10_737_418_240, 1704067206),
            make_torrent(SINGLE_CROSS, "SingleCross.Movie.2024", 4_000_000_000, 1704067207),
            make_torrent(QUEUED_MOV, "Queued.Movie.2024", 2_000_000_000, 1704067208),
        ],
        "tvshows": [
            make_torrent(UNMANAGED_TV, "Unmanaged.Show.S01E01", 1_000_000_000, 1704067200),
            make_torrent(MANAGED_TV, "Managed.Show.S01E01", 800_000_000, 1704067201),
            make_torrent(OLD_TV, "Old.Show.S01E01", 700_000_000, 1704067202),
            make_torrent(NEW_TV, "New.Show.S01E01", 1_200_000_000, 1704067203),
            make_torrent(SEASON_PACK, "Season.Pack.S02", 5_000_000_000, 1704067204),
            make_torrent(REMOVED_TV, "Removed.Show.S01E01", 900_000_000, 1704067205),
        ],
    }

    # ── Radarr mock data ──────────────────────────────────────────────────
    RADARR_HISTORY = [
        {"movieId": 1, "downloadId": MANAGED_MOV, "eventType": "downloadFolderImported", "date": "2024-01-01T00:00:00Z"},
        {"movieId": 2, "downloadId": OLD_MOV, "eventType": "downloadFolderImported", "date": "2024-01-01T00:00:00Z"},
        {"movieId": 2, "downloadId": NEW_MOV, "eventType": "downloadFolderImported", "date": "2024-06-01T00:00:00Z"},
        {"movieId": 3, "downloadId": KEEPER3_OLD, "eventType": "downloadFolderImported", "date": "2023-01-01T00:00:00Z"},
        {"movieId": 3, "downloadId": KEEPER_CROSS, "eventType": "downloadFolderImported", "date": "2024-03-01T00:00:00Z"},
        {"movieId": 4, "downloadId": KEEPER_CROSS, "eventType": "downloadFolderImported", "date": "2024-01-01T00:00:00Z"},
        {"movieId": 4, "downloadId": KEEPER4_NEW, "eventType": "downloadFolderImported", "date": "2024-06-01T00:00:00Z"},
        {"movieId": 5, "downloadId": REMOVED_OLD, "eventType": "downloadFolderImported", "date": "2024-01-01T00:00:00Z"},
        {"movieId": 5, "downloadId": REMOVED_NEW, "eventType": "downloadFolderImported", "date": "2024-06-01T00:00:00Z"},
        {"movieId": 6, "downloadId": LARGER_OLD, "eventType": "downloadFolderImported", "date": "2024-01-01T00:00:00Z"},
        {"movieId": 6, "downloadId": LARGER_NEW, "eventType": "downloadFolderImported", "date": "2024-06-01T00:00:00Z"},
        # Non-import event (should be ignored by abandoned detection)
        {"movieId": 2, "downloadId": NEW_MOV, "eventType": "grabbed", "date": "2024-05-31T00:00:00Z"},
        # Single-import keeper test (Fix 13): SINGLE_CROSS is only import for movieId=7
        # AND an older import for movieId=8 (SINGLE8_NEW is newer for movieId=8)
        {"movieId": 7, "downloadId": SINGLE_CROSS, "eventType": "downloadFolderImported", "date": "2024-03-01T00:00:00Z"},
        {"movieId": 8, "downloadId": SINGLE_CROSS, "eventType": "downloadFolderImported", "date": "2024-01-01T00:00:00Z"},
        {"movieId": 8, "downloadId": SINGLE8_NEW, "eventType": "downloadFolderImported", "date": "2024-06-01T00:00:00Z"},
    ]

    RADARR_MOVIES = [
        {"id": 1, "hasFile": True, "movieFile": {"size": 4_000_000_000, "quality": {"quality": {"name": "Bluray-1080p"}}}},
        {"id": 2, "hasFile": True, "movieFile": {"size": 6_000_000_000, "quality": {"quality": {"name": "Remux-1080p"}}}},
        {"id": 3, "hasFile": True, "movieFile": {"size": 4_500_000_000, "quality": {"quality": {"name": "Bluray-1080p"}}}},
        {"id": 4, "hasFile": True, "movieFile": {"size": 5_000_000_000, "quality": {"quality": {"name": "Remux-1080p"}}}},
        # id=5 intentionally MISSING -- movie removed from Radarr
        {"id": 6, "hasFile": True, "movieFile": {"size": 5_368_709_120, "quality": {"quality": {"name": "Bluray-720p"}}}},
        {"id": 7, "hasFile": True, "movieFile": {"size": 4_000_000_000, "quality": {"quality": {"name": "Bluray-1080p"}}}},
        {"id": 8, "hasFile": True, "movieFile": {"size": 5_000_000_000, "quality": {"quality": {"name": "Remux-1080p"}}}},
    ]

    # ── Sonarr mock data ──────────────────────────────────────────────────
    # Page 1 records (returned on page=1, with totalRecords=1001 to force pagination)
    SONARR_HISTORY_PAGE1 = [
        {"episodeId": 100, "seriesId": 1, "downloadId": MANAGED_TV, "eventType": "downloadFolderImported", "date": "2024-01-01T00:00:00Z"},
        {"episodeId": 200, "seriesId": 1, "downloadId": OLD_TV, "eventType": "downloadFolderImported", "date": "2024-01-01T00:00:00Z"},
        {"episodeId": 200, "seriesId": 1, "downloadId": NEW_TV, "eventType": "downloadFolderImported", "date": "2024-06-01T00:00:00Z"},
        # Season pack covers 3 episodes
        {"episodeId": 300, "seriesId": 2, "downloadId": SEASON_PACK, "eventType": "downloadFolderImported", "date": "2024-01-01T00:00:00Z"},
        {"episodeId": 301, "seriesId": 2, "downloadId": SEASON_PACK, "eventType": "downloadFolderImported", "date": "2024-01-01T00:00:00Z"},
        {"episodeId": 302, "seriesId": 2, "downloadId": SEASON_PACK, "eventType": "downloadFolderImported", "date": "2024-01-01T00:00:00Z"},
        # Non-import event (should be ignored)
        {"episodeId": 200, "seriesId": 1, "downloadId": NEW_TV, "eventType": "grabbed", "date": "2024-05-31T00:00:00Z"},
    ]
    # Page 2 records (critical data only available via pagination)
    SONARR_HISTORY_PAGE2 = [
        # Episode 300 re-imported from a repack -- but 301,302 still reference SEASON_PACK
        {"episodeId": 300, "seriesId": 2, "downloadId": REPACK, "eventType": "downloadFolderImported", "date": "2024-06-01T00:00:00Z"},
        # Removed series scenario
        {"episodeId": 400, "seriesId": 99, "downloadId": REMOVED_TV, "eventType": "downloadFolderImported", "date": "2024-01-01T00:00:00Z"},
        {"episodeId": 400, "seriesId": 99, "downloadId": REMOVED_TV_NEW,"eventType": "downloadFolderImported", "date": "2024-06-01T00:00:00Z"},
    ]
    SONARR_HISTORY_ALL = SONARR_HISTORY_PAGE1 + SONARR_HISTORY_PAGE2

    # seriesId=99 intentionally MISSING -- series removed from Sonarr
    SONARR_SERIES = [
        {"id": 1, "title": "Managed Show"},
        {"id": 2, "title": "Season Pack Show"},
    ]

    class Handler(BaseHTTPRequestHandler):
        def do_POST(self):
            # qbittorrent-api logs in via POST; accept anything and hand back a cookie.
            if self.path.startswith("/api/v2/auth/login"):
                self.send_response(200)
                self.send_header("Content-Type", "text/plain")
                self.send_header("Set-Cookie", "SID=test; path=/")
                self.end_headers()
                self.wfile.write(b"Ok.")
            else:
                self._handle_json()

        def do_GET(self):
            self._handle_json()

        def _handle_json(self):
            parsed = urlparse(self.path)
            path = parsed.path
            params = parse_qs(parsed.query)

            # Merge form-encoded POST bodies into the query params.
            content_length = int(self.headers.get("Content-Length", 0))
            if content_length:
                body = self.rfile.read(content_length).decode()
                params.update(parse_qs(body))

            response = self._route(path, params)

            self.send_response(200)
            self.send_header("Content-Type", "application/json")
            self.end_headers()
            self.wfile.write(json.dumps(response).encode())

        def _route(self, path, params):
            if SERVICE == "qbit":
                category = params.get("category", [""])[0]
                return QBIT_DATA.get(category, [])

            elif SERVICE == "radarr":
                if path == "/api/v3/history":
                    return {"records": RADARR_HISTORY, "totalRecords": len(RADARR_HISTORY)}
                elif path == "/api/v3/queue":
                    return {"records": [{"downloadId": QUEUED_MOV}], "totalRecords": 1}
                elif path == "/api/v3/movie":
                    return RADARR_MOVIES
                return {}

            elif SERVICE == "sonarr":
                if path == "/api/v3/history":
                    page = int(params.get("page", ["1"])[0])
                    if page == 1:
                        return {"records": SONARR_HISTORY_PAGE1, "totalRecords": 1001}
                    else:
                        return {"records": SONARR_HISTORY_PAGE2, "totalRecords": 1001}
                elif path == "/api/v3/queue":
                    return {"records": [], "totalRecords": 0}
                elif path == "/api/v3/series":
                    return SONARR_SERIES
                return {}

            return {}

        def log_message(self, fmt, *args):
            pass

    HTTPServer(("0.0.0.0", PORT), Handler).serve_forever()
  '';
in
pkgs.testers.runNixOSTest {
  name = "torrent-audit";

  nodes.machine =
    { pkgs, ... }:
    {
      environment.systemPackages = [ pkgs.curl ];

      systemd.services.mock-qbittorrent = {
        description = "Mock qBittorrent API";
        wantedBy = [ "multi-user.target" ];
        serviceConfig = {
          ExecStart = "${pkgs.python3}/bin/python3 ${mockScript} qbit ${toString qbitPort}";
          Type = "simple";
        };
      };

      systemd.services.mock-radarr = {
        description = "Mock Radarr API";
        wantedBy = [ "multi-user.target" ];
        serviceConfig = {
          ExecStart = "${pkgs.python3}/bin/python3 ${mockScript} radarr ${toString radarrPort}";
          Type = "simple";
        };
      };

      systemd.services.mock-sonarr = {
        description = "Mock Sonarr API";
        wantedBy = [ "multi-user.target" ];
        serviceConfig = {
          ExecStart = "${pkgs.python3}/bin/python3 ${mockScript} sonarr ${toString sonarrPort}";
          Type = "simple";
        };
      };
    };

  testScript = ''
    start_all()
    machine.wait_for_unit("multi-user.target")

    # Wait for all mock services to be responsive
    machine.wait_for_unit("mock-qbittorrent.service")
    machine.wait_for_unit("mock-radarr.service")
    machine.wait_for_unit("mock-sonarr.service")
    machine.wait_until_succeeds(
        "curl -sf http://localhost:${toString qbitPort}/api/v2/torrents/info?category=movies",
        timeout=30,
    )
    machine.wait_until_succeeds(
        "curl -sf http://localhost:${toString radarrPort}/api/v3/movie",
        timeout=30,
    )
    machine.wait_until_succeeds(
        "curl -sf http://localhost:${toString sonarrPort}/api/v3/queue",
        timeout=30,
    )

    # Run the audit script and capture stdout
    output = machine.succeed(
        "QBITTORRENT_URL=http://localhost:${toString qbitPort} "
        "RADARR_URL=http://localhost:${toString radarrPort} "
        "RADARR_CONFIG=${radarrConfig} "
        "SONARR_URL=http://localhost:${toString sonarrPort} "
        "SONARR_CONFIG=${sonarrConfig} "
        "CATEGORIES=movies,tvshows,anime "
        "${python} ${auditScript}"
    )

    print("=== SCRIPT OUTPUT ===")
    print(output)
    print("=== END OUTPUT ===")

    # Fix 10: Assert section heading exists before splitting
    assert "ABANDONED UPGRADE LEFTOVERS" in output, \
        "Output must contain ABANDONED UPGRADE LEFTOVERS heading"

    # Split output into sections for targeted assertions
    unmanaged_section = output.split("ABANDONED UPGRADE LEFTOVERS")[0]
    abandoned_section = output.split("ABANDONED UPGRADE LEFTOVERS")[1]

    # Helper: find a torrent name line and check nearby lines (within 3) for a note
    def assert_note_near(section, torrent_name, note_text):
        lines = section.splitlines()
        found_idx = None
        for i, line in enumerate(lines):
            if torrent_name in line:
                found_idx = i
                break
        assert found_idx is not None, f"{torrent_name} not found in section"
        nearby = "\n".join(lines[max(0, found_idx):found_idx + 4])
        assert note_text in nearby, \
            f"Expected '{note_text}' near '{torrent_name}', got:\n{nearby}"

    with subtest("Detects unmanaged movie torrent"):
        assert "Unmanaged.Movie.2024" in unmanaged_section, \
            "Should detect unmanaged movie"
        assert "1 unmanaged / 9 total" in unmanaged_section, \
            "Should show 1 unmanaged movie out of 9"

    with subtest("Detects unmanaged TV torrent"):
        assert "Unmanaged.Show.S01E01" in unmanaged_section, \
            "Should detect unmanaged TV show"
        assert "1 unmanaged / 6 total" in unmanaged_section, \
            "Should show 1 unmanaged TV show out of 6"

    with subtest("Empty category shows zero counts"):
        assert "0 unmanaged / 0 total" in unmanaged_section, \
            "anime category should show 0 unmanaged / 0 total"

    with subtest("Managed torrents are NOT listed as unmanaged"):
        assert "Managed.Movie.2024" not in unmanaged_section, \
            "Managed movie should not appear in unmanaged section"
        assert "Managed.Show.S01E01" not in unmanaged_section, \
            "Managed TV show should not appear in unmanaged section"

    with subtest("Queue-known hash is NOT listed as unmanaged"):
        assert "Queued.Movie.2024" not in unmanaged_section, \
            "Torrent in Radarr queue should not appear as unmanaged"

    with subtest("Detects abandoned movie upgrade as SAFE"):
        assert "Old.Movie.Quality.2024" in abandoned_section, \
            "Should detect abandoned movie"
        for line in abandoned_section.splitlines():
            if "Old.Movie.Quality.2024" in line:
                assert "SAFE" in line, f"Old movie should be SAFE, got: {line}"
                break

    with subtest("Detects abandoned TV episode as SAFE"):
        assert "Old.Show.S01E01" in abandoned_section, \
            "Should detect abandoned TV episode"
        for line in abandoned_section.splitlines():
            if "Old.Show.S01E01" in line:
                assert "SAFE" in line, f"Old TV should be SAFE, got: {line}"
                break

    with subtest("Keeper-also-abandoned hash is NOT listed as abandoned"):
        assert "CrossRef.Movie.2024" not in abandoned_section, \
            "Hash that is keeper for another movie must not appear as abandoned"

    with subtest("Season pack NOT abandoned when still active for other episodes"):
        assert "Season.Pack.S02" not in abandoned_section, \
            "Season pack still active for episodes 301/302 must not be abandoned"

    with subtest("Negative assertions for keepers"):
        assert "New.Movie.Quality.2024" not in abandoned_section, \
            "Keeper for movieId=2 must not appear as abandoned"
        assert "New.Show.S01E01" not in abandoned_section, \
            "Keeper for episodeId=200 must not appear as abandoned"
        assert "Managed.Movie.2024" not in abandoned_section, \
            "Single-import movie must not appear as abandoned"
        assert "Managed.Show.S01E01" not in abandoned_section, \
            "Single-import TV show must not appear as abandoned"

    with subtest("Single-import keeper not abandoned (Bug 1 regression)"):
        assert "SingleCross.Movie.2024" not in abandoned_section, \
            "Hash that is sole import for movieId=7 must be in keeper set, not abandoned"

    with subtest("Removed movie triggers REVIEW status"):
        assert "Removed.Movie.2024" in abandoned_section, \
            "Should detect abandoned torrent for removed movie"
        assert_note_near(abandoned_section, "Removed.Movie.2024", "movie removed")
        for line in abandoned_section.splitlines():
            if "Removed.Movie.2024" in line:
                assert "REVIEW" in line, f"Removed movie should be REVIEW, got: {line}"
                break

    with subtest("Abandoned larger than current triggers REVIEW"):
        assert "Larger.Movie.2024" in abandoned_section, \
            "Should detect larger abandoned torrent"
        assert_note_near(abandoned_section, "Larger.Movie.2024", "abandoned is larger")
        for line in abandoned_section.splitlines():
            if "Larger.Movie.2024" in line:
                assert "REVIEW" in line, f"Larger abandoned should be REVIEW, got: {line}"
                break

    with subtest("Removed series triggers REVIEW status for TV"):
        assert "Removed.Show.S01E01" in abandoned_section, \
            "Should detect abandoned torrent for removed series"
        assert_note_near(abandoned_section, "Removed.Show.S01E01", "series removed")
        for line in abandoned_section.splitlines():
            if "Removed.Show.S01E01" in line:
                assert "REVIEW" in line, f"Removed series should be REVIEW, got: {line}"
                break

    with subtest("Correct abandoned counts per category"):
        assert "movies (3 abandoned)" in abandoned_section, \
            "Should show 3 abandoned movies"
        assert "tvshows (2 abandoned)" in abandoned_section, \
            "Should show 2 abandoned TV shows"

    with subtest("Correct summary totals"):
        assert "ABANDONED: 5 total (2 safe to delete)" in output, \
            "Summary should show 5 total abandoned, 2 safe to delete"
        assert "SAFE TO RECLAIM: 3.4 GiB" in output, \
            "Should report 3.4 GiB reclaimable (2.8 GiB movie + 0.7 GiB TV)"
  '';
}
|
||||
Reference in New Issue
Block a user