Move embedded Python scripts out of Nix string interpolation into standalone files under scripts/. Each script reads its configuration from a JSON file passed as the first CLI argument. Shared utilities (API key reading, API polling, health check loop) are consolidated into common.py, eliminating three copies of read_api_key and wait_for_api. Implementation improvements included in the extraction: - Remove pyarr dependency; all HTTP calls use raw requests - Add update semantics: download clients and synced apps are now compared against desired state and updated on drift via PUT - Bazarr configure_provider compares API keys and updates stale ones - Narrow health_check_loop exception clause from bare Exception to (RequestException, ValueError, KeyError) - Fix double resp.json() call in resolve_profile_id (jellyseerr) - Replace os.system with subprocess.run for Jellyseerr restart - Handle missing 'value' key in Servarr field API responses
377 lines · 12 KiB · Python
#!/usr/bin/env python3
|
|
"""Declarative API initialization for Servarr applications.
|
|
|
|
Idempotently provisions download clients, root folders, synced applications
|
|
(Prowlarr), and naming configuration via the Servarr HTTP API. Existing
|
|
entities are updated when their configuration drifts from the declared state.
|
|
"""
|
|
|
|
import os
import sys

import requests as http

from common import (
    health_check_loop,
    load_config,
    read_api_key_xml,
    wait_for_api,
)
|
|
|
|
# Maps Prowlarr application implementation names to Newznab parent category
# names used for automatic sync-category detection. Consulted by
# resolve_sync_categories when an app declares no explicit syncCategories;
# implementations not listed here fall back to an empty category list.
IMPLEMENTATION_CATEGORY_MAP = {
    "Sonarr": "TV",
    "Radarr": "Movies",
    "Lidarr": "Audio",
    "Readarr": "Books",
    "Whisparr": "XXX",
}
|
|
|
|
|
|
def _fields_to_dict(fields):
|
|
"""Convert the API's [{name, value}] array into a flat dict.
|
|
|
|
Some API responses omit the 'value' key for null/unset fields.
|
|
"""
|
|
return {f["name"]: f.get("value") for f in fields}
|
|
|
|
|
|
def _dict_to_fields(d):
|
|
"""Convert a flat dict into the API's [{name, value}] array."""
|
|
return [{"name": k, "value": v} for k, v in d.items()]
|
|
|
|
|
|
def _needs_field_update(desired, current_fields):
|
|
"""Return True if any desired field value differs from the current state."""
|
|
current = _fields_to_dict(current_fields)
|
|
return any(desired.get(k) != current.get(k) for k in desired)
|
|
|
|
|
|
# -- Download clients --------------------------------------------------------
|
|
|
|
|
|
def ensure_download_clients(base_url, api_key, download_clients):
    """Idempotently provision download clients, updating on drift.

    Fetches the current client list once, then for each declared client:
    skips it when already correct, PUTs an update when configuration has
    drifted, or POSTs a new entry when it does not exist yet.
    """
    resp = http.get(
        f"{base_url}/downloadclient",
        headers={"X-Api-Key": api_key},
        timeout=30,
    )
    resp.raise_for_status()
    existing_by_name = {dc["name"]: dc for dc in resp.json()}

    for dc in download_clients:
        current = existing_by_name.get(dc["name"])
        if current is None:
            _add_download_client(base_url, api_key, dc)
        elif _download_client_drifted(dc, current):
            _update_download_client(base_url, api_key, dc, current)
        else:
            print(f"Download client '{dc['name']}' already correct, skipping")


def _download_client_drifted(dc, current):
    """Return True if the existing entry differs from the declared client."""
    return (
        current.get("implementation") != dc["implementation"]
        or current.get("configContract") != dc["configContract"]
        or current.get("protocol") != dc["protocol"]
        or _needs_field_update(dc["fields"], current.get("fields", []))
    )


def _update_download_client(base_url, api_key, dc, current):
    """PUT the declared configuration over an existing client entry.

    The current server object is used as the payload base so server-managed
    keys (id, enable, priority, ...) are preserved.
    """
    dc_name = dc["name"]
    print(f"Updating download client '{dc_name}'...")
    payload = {**current}
    payload.update(
        implementation=dc["implementation"],
        configContract=dc["configContract"],
        protocol=dc["protocol"],
        fields=_dict_to_fields(dc["fields"]),
    )
    resp = http.put(
        f"{base_url}/downloadclient/{current['id']}",
        headers={"X-Api-Key": api_key, "Content-Type": "application/json"},
        params={"forceSave": "true"},
        json=payload,
        timeout=30,
    )
    resp.raise_for_status()
    print(f"Download client '{dc_name}' updated")


def _add_download_client(base_url, api_key, dc):
    """POST a new download-client entry built from the declared state."""
    dc_name = dc["name"]
    print(f"Adding download client '{dc_name}'...")
    payload = {
        "enable": True,
        "protocol": dc["protocol"],
        "priority": 1,
        "name": dc_name,
        "implementation": dc["implementation"],
        "configContract": dc["configContract"],
        "fields": _dict_to_fields(dc["fields"]),
        "tags": [],
    }
    resp = http.post(
        f"{base_url}/downloadclient",
        headers={"X-Api-Key": api_key, "Content-Type": "application/json"},
        params={"forceSave": "true"},
        json=payload,
        timeout=30,
    )
    resp.raise_for_status()
    print(f"Download client '{dc_name}' added")
|
|
|
|
|
|
# -- Root folders ------------------------------------------------------------
|
|
|
|
|
|
def ensure_root_folders(base_url, api_key, root_folders):
    """Idempotently provision root folders (create-only).

    Existing folders are never modified or removed; any declared path that
    is not already registered is created via POST.
    """
    listing = http.get(
        f"{base_url}/rootfolder",
        headers={"X-Api-Key": api_key},
        timeout=30,
    )
    listing.raise_for_status()
    known_paths = {entry["path"] for entry in listing.json()}

    for path in root_folders:
        if path in known_paths:
            print(f"Root folder '{path}' already exists, skipping")
            continue

        print(f"Adding root folder '{path}'...")
        created = http.post(
            f"{base_url}/rootfolder",
            headers={"X-Api-Key": api_key, "Content-Type": "application/json"},
            json={"path": path},
            timeout=30,
        )
        created.raise_for_status()
        print(f"Root folder '{path}' added")
|
|
|
|
|
|
# -- Synced applications (Prowlarr) ------------------------------------------
|
|
|
|
|
|
def resolve_sync_categories(base_url, api_key, implementation, explicit):
    """Resolve Newznab sync categories, auto-detecting from Prowlarr if needed.

    A non-empty *explicit* list wins outright. Otherwise the parent category
    mapped to *implementation* is looked up in Prowlarr's category tree and
    its id plus every sub-category id is returned. Unknown implementations
    or a failed lookup yield an empty list (with a warning on stderr for
    the latter).
    """
    if explicit:
        return explicit

    category_name = IMPLEMENTATION_CATEGORY_MAP.get(implementation)
    if not category_name:
        return []

    print(f"Auto-detecting sync categories for {implementation}...")
    resp = http.get(
        f"{base_url}/indexer/categories",
        headers={"X-Api-Key": api_key},
        timeout=30,
    )
    resp.raise_for_status()

    sync_cats = []
    for cat in resp.json():
        if cat["name"] != category_name:
            continue
        sync_cats.append(cat["id"])
        sync_cats.extend(sub["id"] for sub in cat.get("subCategories", []))

    if not sync_cats:
        print(
            f"Warning: could not auto-detect categories for "
            f"'{category_name}', using empty list",
            file=sys.stderr,
        )
        return []

    print(f"Resolved sync categories: {sync_cats}")
    return sync_cats
|
|
|
|
|
|
def ensure_synced_apps(base_url, api_key, synced_apps):
    """Idempotently provision synced applications (Prowlarr), updating on drift.

    For each declared app the target's API key is read from its config.xml
    (apiKeyFrom) and sync categories are resolved before the existing entry
    is compared against the desired state, updated via PUT, or created via
    POST.
    """
    resp = http.get(
        f"{base_url}/applications",
        headers={"X-Api-Key": api_key},
        timeout=30,
    )
    resp.raise_for_status()
    existing_by_name = {entry["name"]: entry for entry in resp.json()}

    for app in synced_apps:
        app_name = app["name"]
        target_api_key = read_api_key_xml(app["apiKeyFrom"])
        sync_categories = resolve_sync_categories(
            base_url, api_key, app["implementation"], app.get("syncCategories", []),
        )

        desired_fields = {
            "prowlarrUrl": app["prowlarrUrl"],
            "baseUrl": app["baseUrl"],
            "apiKey": target_api_key,
            "syncCategories": sync_categories,
        }

        current = existing_by_name.get(app_name)
        if current is not None:
            in_sync = (
                current.get("implementation") == app["implementation"]
                and current.get("configContract") == app["configContract"]
                and current.get("syncLevel") == app["syncLevel"]
                and not _needs_field_update(desired_fields, current.get("fields", []))
            )
            if in_sync:
                print(f"Synced app '{app_name}' already correct, skipping")
                continue

            print(f"Updating synced app '{app_name}'...")
            # Start from the server object so server-managed keys survive.
            payload = dict(current)
            payload["implementation"] = app["implementation"]
            payload["configContract"] = app["configContract"]
            payload["syncLevel"] = app["syncLevel"]
            payload["fields"] = _dict_to_fields(desired_fields)
            resp = http.put(
                f"{base_url}/applications/{current['id']}",
                headers={"X-Api-Key": api_key, "Content-Type": "application/json"},
                params={"forceSave": "true"},
                json=payload,
                timeout=30,
            )
            resp.raise_for_status()
            print(f"Synced app '{app_name}' updated")
        else:
            print(f"Adding synced app '{app_name}'...")
            payload = {
                "name": app_name,
                "implementation": app["implementation"],
                "configContract": app["configContract"],
                "syncLevel": app["syncLevel"],
                "fields": _dict_to_fields(desired_fields),
                "tags": [],
            }
            resp = http.post(
                f"{base_url}/applications",
                headers={"X-Api-Key": api_key, "Content-Type": "application/json"},
                params={"forceSave": "true"},
                json=payload,
                timeout=30,
            )
            resp.raise_for_status()
            print(f"Synced app '{app_name}' added")
|
|
|
|
|
|
# -- Naming ------------------------------------------------------------------
|
|
|
|
|
|
def update_naming(base_url, api_key, naming_config):
    """Merge desired naming fields into the current config.

    No-op when *naming_config* is empty or every desired key already holds
    the desired value; otherwise PUTs the current config overlaid with the
    desired fields.
    """
    if not naming_config:
        return

    print("Checking naming configuration...")
    resp = http.get(
        f"{base_url}/config/naming",
        headers={"X-Api-Key": api_key},
        timeout=30,
    )
    resp.raise_for_status()
    current = resp.json()

    in_sync = all(
        current.get(key) == value for key, value in naming_config.items()
    )
    if in_sync:
        print("Naming configuration already correct, skipping")
        return

    print("Updating naming configuration...")
    merged = dict(current)
    merged.update(naming_config)
    resp = http.put(
        f"{base_url}/config/naming",
        headers={"X-Api-Key": api_key, "Content-Type": "application/json"},
        json=merged,
        timeout=30,
    )
    resp.raise_for_status()
    print("Naming configuration updated")
|
|
|
|
|
|
# -- Health checks -----------------------------------------------------------
|
|
|
|
|
|
def run_health_checks(base_url, api_key, name, cfg):
    """Run connectivity health checks if enabled.

    Each provisioned section (download clients, synced apps) gets a
    testall round-trip driven by health_check_loop with the configured
    retry count and interval.
    """
    if not cfg["healthChecks"]:
        return

    print(f"Running {name} health checks...")
    max_retries = cfg["healthCheckRetries"]
    interval = cfg["healthCheckInterval"]

    # (config key, API endpoint, entity label, banner) per check.
    checks = (
        ("downloadClients", "downloadclient", "download client",
         "Testing download client connectivity..."),
        ("syncedApps", "applications", "synced application",
         "Testing synced application connectivity..."),
    )
    for cfg_key, endpoint, label, banner in checks:
        if not cfg.get(cfg_key):
            continue
        print(banner)
        health_check_loop(
            f"{base_url}/{endpoint}/testall",
            api_key,
            label,
            name,
            max_retries,
            interval,
        )

    print(f"{name} health checks passed")
|
|
|
|
|
|
# -- Main --------------------------------------------------------------------
|
|
|
|
|
|
def main():
    """Entry point: load JSON config, wait for the API, provision sections.

    Exits quietly when the app's config.xml does not exist yet (the service
    has never started, so there is no API key to use).
    """
    cfg = load_config()
    name = cfg["name"]
    data_dir = cfg["dataDir"]
    bind_address = cfg.get("bindAddress", "127.0.0.1")
    port = cfg["port"]
    api_version = cfg["apiVersion"]
    api_timeout = cfg["apiTimeout"]

    # The API key lives in config.xml, written by the app on first start.
    config_xml = f"{data_dir}/config.xml"
    if not os.path.isfile(config_xml):
        print(f"Config file {config_xml} not found, skipping {name} init")
        return

    api_key = read_api_key_xml(config_xml)
    base_url = f"http://{bind_address}:{port}/api/{api_version}"

    wait_for_api(base_url, api_key, api_timeout, name)

    if cfg.get("downloadClients"):
        ensure_download_clients(base_url, api_key, cfg["downloadClients"])

    if cfg.get("rootFolders"):
        ensure_root_folders(base_url, api_key, cfg["rootFolders"])

    if cfg.get("syncedApps"):
        ensure_synced_apps(base_url, api_key, cfg["syncedApps"])

    if cfg.get("naming"):
        update_naming(base_url, api_key, cfg["naming"])

    run_health_checks(base_url, api_key, name, cfg)

    print(f"{name} init complete")
|
|
|
|
|
|
# Script entry point: run the declarative init against the configured app.
if __name__ == "__main__":
    main()
|