move to python scripts from shell scripts
Allows usage of Servarr python libraries, reduces implementation-specific code.
This commit is contained in:
866
module.nix
866
module.nix
@@ -327,278 +327,426 @@ let
|
||||
};
|
||||
};
|
||||
|
||||
# Map Servarr implementation names to their Newznab parent category names.
|
||||
# Used to auto-detect syncCategories from the Prowlarr API when not explicitly set.
|
||||
implementationCategoryMap = {
|
||||
Sonarr = "TV";
|
||||
Radarr = "Movies";
|
||||
Lidarr = "Audio";
|
||||
Readarr = "Books";
|
||||
Whisparr = "XXX";
|
||||
};
|
||||
# Python environment with pyarr for Servarr API operations and requests for
|
||||
# Prowlarr, Bazarr, and health-check endpoints that pyarr doesn't cover.
|
||||
pythonEnv = pkgs.python3.withPackages (
|
||||
ps: with ps; [
|
||||
pyarr
|
||||
requests
|
||||
]
|
||||
);
|
||||
|
||||
# Emit shell code that sets SYNC_CATEGORIES to a JSON array of category IDs.
|
||||
# When the user provides explicit IDs, use those. Otherwise, query the Prowlarr
|
||||
# /indexer/categories endpoint and collect the parent + all subcategory IDs for
|
||||
# the implementation's Newznab category.
|
||||
mkResolveSyncCategories =
|
||||
app:
|
||||
let
|
||||
hasExplicit = app.syncCategories != [ ];
|
||||
categoryName = implementationCategoryMap.${app.implementation} or null;
|
||||
in
|
||||
if hasExplicit then
|
||||
"SYNC_CATEGORIES=${lib.escapeShellArg (builtins.toJSON app.syncCategories)}"
|
||||
else if categoryName != null then
|
||||
''
|
||||
echo "Auto-detecting sync categories for ${app.implementation}..."
|
||||
ALL_CATEGORIES=$(${curl} -sf "$BASE_URL/indexer/categories" -H "X-Api-Key: $API_KEY")
|
||||
SYNC_CATEGORIES=$(echo "$ALL_CATEGORIES" | ${jq} --arg name ${lib.escapeShellArg categoryName} \
|
||||
'[.[] | select(.name == $name) | .id, .subCategories[].id]')
|
||||
if [ "$SYNC_CATEGORIES" = "[]" ] || [ -z "$SYNC_CATEGORIES" ]; then
|
||||
echo "Warning: could not auto-detect categories for '${categoryName}', using empty list" >&2
|
||||
SYNC_CATEGORIES='[]'
|
||||
else
|
||||
echo "Resolved sync categories: $SYNC_CATEGORIES"
|
||||
fi
|
||||
''
|
||||
else
|
||||
"SYNC_CATEGORIES='[]' ";
|
||||
|
||||
curl = lib.getExe pkgs.curl;
|
||||
jq = lib.getExe pkgs.jq;
|
||||
grep = lib.getExe pkgs.gnugrep;
|
||||
awk = lib.getExe pkgs.gawk;
|
||||
|
||||
# Build the JSON request body for creating a download client via the Servarr
# /downloadclient endpoint. The user-facing `fields` attrset is flattened into
# the list-of-{name, value} records the API expects; the client is always
# enabled with priority 1 and no tags.
mkDownloadClientPayload =
  dc:
  builtins.toJSON {
    enable = true;
    protocol = dc.protocol;
    priority = 1;
    name = dc.name;
    implementation = dc.implementation;
    configContract = dc.configContract;
    # Flatten { key = value; } into [ { name = key; value = value; } ].
    fields = lib.mapAttrsToList (n: v: {
      name = n;
      value = v;
    }) dc.fields;
    tags = [ ];
  };
|
||||
|
||||
mkDownloadClientSection = dc: ''
|
||||
# Download client: ${dc.name}
|
||||
echo "Checking download client '${dc.name}'..."
|
||||
EXISTING_DC=$(${curl} -sf "$BASE_URL/downloadclient" -H "X-Api-Key: $API_KEY")
|
||||
if echo "$EXISTING_DC" | ${jq} -e --arg name ${lib.escapeShellArg dc.name} '.[] | select(.name == $name)' > /dev/null 2>&1; then
|
||||
echo "Download client '${dc.name}' already exists, skipping"
|
||||
else
|
||||
echo "Adding download client '${dc.name}'..."
|
||||
${curl} -sf -X POST "$BASE_URL/downloadclient?forceSave=true" \
|
||||
-H "X-Api-Key: $API_KEY" \
|
||||
-H "Content-Type: application/json" \
|
||||
-d ${lib.escapeShellArg (mkDownloadClientPayload dc)}
|
||||
echo "Download client '${dc.name}' added"
|
||||
fi
|
||||
'';
|
||||
|
||||
mkRootFolderSection = path: ''
|
||||
# Root folder: ${path}
|
||||
echo "Checking root folder '${path}'..."
|
||||
EXISTING_RF=$(${curl} -sf "$BASE_URL/rootfolder" -H "X-Api-Key: $API_KEY")
|
||||
if echo "$EXISTING_RF" | ${jq} -e --arg path ${lib.escapeShellArg path} '.[] | select(.path == $path)' > /dev/null 2>&1; then
|
||||
echo "Root folder '${path}' already exists, skipping"
|
||||
else
|
||||
echo "Adding root folder '${path}'..."
|
||||
${curl} -sf -X POST "$BASE_URL/rootfolder" \
|
||||
-H "X-Api-Key: $API_KEY" \
|
||||
-H "Content-Type: application/json" \
|
||||
-d ${lib.escapeShellArg (builtins.toJSON { inherit path; })}
|
||||
echo "Root folder '${path}' added"
|
||||
fi
|
||||
'';
|
||||
|
||||
mkSyncedAppSection = app: ''
|
||||
# Synced app: ${app.name}
|
||||
echo "Checking synced app '${app.name}'..."
|
||||
TARGET_API_KEY=$(${grep} -oP '(?<=<ApiKey>)[^<]+' ${lib.escapeShellArg app.apiKeyFrom})
|
||||
EXISTING_APPS=$(${curl} -sf "$BASE_URL/applications" -H "X-Api-Key: $API_KEY")
|
||||
if echo "$EXISTING_APPS" | ${jq} -e --arg name ${lib.escapeShellArg app.name} '.[] | select(.name == $name)' > /dev/null 2>&1; then
|
||||
echo "Synced app '${app.name}' already exists, skipping"
|
||||
else
|
||||
echo "Adding synced app '${app.name}'..."
|
||||
${mkResolveSyncCategories app}
|
||||
PAYLOAD=$(${jq} -n \
|
||||
--arg name ${lib.escapeShellArg app.name} \
|
||||
--arg implementation ${lib.escapeShellArg app.implementation} \
|
||||
--arg configContract ${lib.escapeShellArg app.configContract} \
|
||||
--arg syncLevel ${lib.escapeShellArg app.syncLevel} \
|
||||
--arg prowlarrUrl ${lib.escapeShellArg app.prowlarrUrl} \
|
||||
--arg baseUrl ${lib.escapeShellArg app.baseUrl} \
|
||||
--arg apiKey "$TARGET_API_KEY" \
|
||||
--argjson syncCategories "$SYNC_CATEGORIES" \
|
||||
'{
|
||||
name: $name,
|
||||
implementation: $implementation,
|
||||
configContract: $configContract,
|
||||
syncLevel: $syncLevel,
|
||||
fields: [
|
||||
{name: "prowlarrUrl", value: $prowlarrUrl},
|
||||
{name: "baseUrl", value: $baseUrl},
|
||||
{name: "apiKey", value: $apiKey},
|
||||
{name: "syncCategories", value: $syncCategories}
|
||||
],
|
||||
tags: []
|
||||
}')
|
||||
${curl} -sf -X POST "$BASE_URL/applications?forceSave=true" \
|
||||
-H "X-Api-Key: $API_KEY" \
|
||||
-H "Content-Type: application/json" \
|
||||
-d "$PAYLOAD"
|
||||
echo "Synced app '${app.name}' added"
|
||||
fi
|
||||
'';
|
||||
|
||||
mkNamingSection =
|
||||
inst:
|
||||
lib.optionalString (inst.naming != { }) ''
|
||||
# Naming configuration
|
||||
echo "Checking naming configuration..."
|
||||
CURRENT_NAMING=$(${curl} -sf "$BASE_URL/config/naming" -H "X-Api-Key: $API_KEY")
|
||||
DESIRED_NAMING=${lib.escapeShellArg (builtins.toJSON inst.naming)}
|
||||
NEEDS_UPDATE=$(${jq} -n --argjson current "$CURRENT_NAMING" --argjson desired "$DESIRED_NAMING" \
|
||||
'[$desired | to_entries[] | select(.value != $current[.key])] | length > 0')
|
||||
if [ "$NEEDS_UPDATE" = "true" ]; then
|
||||
echo "Updating naming configuration..."
|
||||
MERGED_NAMING=$(echo "$CURRENT_NAMING" | ${jq} --argjson desired "$DESIRED_NAMING" '. * $desired')
|
||||
${curl} -sf -X PUT "$BASE_URL/config/naming" \
|
||||
-H "X-Api-Key: $API_KEY" \
|
||||
-H "Content-Type: application/json" \
|
||||
-d "$MERGED_NAMING"
|
||||
echo "Naming configuration updated"
|
||||
else
|
||||
echo "Naming configuration already correct, skipping"
|
||||
fi
|
||||
'';
|
||||
|
||||
mkHealthCheckSection =
|
||||
# Build a JSON configuration blob for a Servarr instance, baked into the
|
||||
# Python init script at Nix evaluation time.
|
||||
mkInitConfig =
|
||||
name: inst:
|
||||
lib.optionalString inst.healthChecks ''
|
||||
# Health checks
|
||||
echo "Running ${name} health checks..."
|
||||
HC_MAX_RETRIES=${builtins.toString inst.healthCheckRetries}
|
||||
HC_INTERVAL=${builtins.toString inst.healthCheckInterval}
|
||||
|
||||
${lib.optionalString (inst.downloadClients != [ ]) ''
|
||||
# Test download client connectivity (with retries)
|
||||
echo "Testing download client connectivity..."
|
||||
DC_ATTEMPT=0
|
||||
while true; do
|
||||
DC_HEALTHY=true
|
||||
DC_TEST=$(${curl} -s --connect-timeout 10 --max-time 30 -X POST "$BASE_URL/downloadclient/testall" \
|
||||
-H "X-Api-Key: $API_KEY" \
|
||||
-H "Content-Type: application/json") || {
|
||||
DC_HEALTHY=false
|
||||
DC_LAST_ERROR="could not reach ${name} API for download client test"
|
||||
}
|
||||
if [ "$DC_HEALTHY" = true ]; then
|
||||
DC_FAILURES=$(echo "$DC_TEST" | ${jq} '[.[] | select(.isValid == false)]')
|
||||
DC_FAIL_COUNT=$(echo "$DC_FAILURES" | ${jq} 'length')
|
||||
if [ "$DC_FAIL_COUNT" -gt 0 ]; then
|
||||
DC_HEALTHY=false
|
||||
DC_LAST_ERROR=$(echo "$DC_FAILURES" | ${jq} -r '.[] | " - ID \(.id): \(.validationFailures | map(.errorMessage) | join(", "))"')
|
||||
fi
|
||||
fi
|
||||
if [ "$DC_HEALTHY" = true ]; then
|
||||
echo "All download clients healthy"
|
||||
break
|
||||
fi
|
||||
DC_ATTEMPT=$((DC_ATTEMPT + 1))
|
||||
if [ "$DC_ATTEMPT" -gt "$HC_MAX_RETRIES" ]; then
|
||||
echo "Health check FAILED after $DC_ATTEMPT attempts: download client(s) unreachable:" >&2
|
||||
echo "$DC_LAST_ERROR" >&2
|
||||
exit 1
|
||||
fi
|
||||
echo "Download client health check failed (attempt $DC_ATTEMPT/$HC_MAX_RETRIES), retrying in ''${HC_INTERVAL}s..."
|
||||
sleep "$HC_INTERVAL"
|
||||
done
|
||||
''}
|
||||
|
||||
${lib.optionalString (inst.syncedApps != [ ]) ''
|
||||
# Test synced application connectivity (with retries)
|
||||
echo "Testing synced application connectivity..."
|
||||
APP_ATTEMPT=0
|
||||
while true; do
|
||||
APP_HEALTHY=true
|
||||
APP_TEST=$(${curl} -s --connect-timeout 10 --max-time 30 -X POST "$BASE_URL/applications/testall" \
|
||||
-H "X-Api-Key: $API_KEY" \
|
||||
-H "Content-Type: application/json") || {
|
||||
APP_HEALTHY=false
|
||||
APP_LAST_ERROR="could not reach ${name} API for synced app test"
|
||||
}
|
||||
if [ "$APP_HEALTHY" = true ]; then
|
||||
APP_FAILURES=$(echo "$APP_TEST" | ${jq} '[.[] | select(.isValid == false)]')
|
||||
APP_FAIL_COUNT=$(echo "$APP_FAILURES" | ${jq} 'length')
|
||||
if [ "$APP_FAIL_COUNT" -gt 0 ]; then
|
||||
APP_HEALTHY=false
|
||||
APP_LAST_ERROR=$(echo "$APP_FAILURES" | ${jq} -r '.[] | " - ID \(.id): \(.validationFailures | map(.errorMessage) | join(", "))"')
|
||||
fi
|
||||
fi
|
||||
if [ "$APP_HEALTHY" = true ]; then
|
||||
echo "All synced applications healthy"
|
||||
break
|
||||
fi
|
||||
APP_ATTEMPT=$((APP_ATTEMPT + 1))
|
||||
if [ "$APP_ATTEMPT" -gt "$HC_MAX_RETRIES" ]; then
|
||||
echo "Health check FAILED after $APP_ATTEMPT attempts: synced application(s) unreachable:" >&2
|
||||
echo "$APP_LAST_ERROR" >&2
|
||||
exit 1
|
||||
fi
|
||||
echo "Synced app health check failed (attempt $APP_ATTEMPT/$HC_MAX_RETRIES), retrying in ''${HC_INTERVAL}s..."
|
||||
sleep "$HC_INTERVAL"
|
||||
done
|
||||
''}
|
||||
|
||||
echo "${name} health checks passed"
|
||||
'';
|
||||
builtins.toJSON {
|
||||
inherit name;
|
||||
inherit (inst)
|
||||
dataDir
|
||||
port
|
||||
apiVersion
|
||||
apiTimeout
|
||||
healthChecks
|
||||
healthCheckRetries
|
||||
healthCheckInterval
|
||||
rootFolders
|
||||
naming
|
||||
;
|
||||
downloadClients = map (dc: {
|
||||
inherit (dc)
|
||||
name
|
||||
implementation
|
||||
configContract
|
||||
protocol
|
||||
fields
|
||||
;
|
||||
}) inst.downloadClients;
|
||||
syncedApps = map (app: {
|
||||
inherit (app)
|
||||
name
|
||||
implementation
|
||||
configContract
|
||||
syncLevel
|
||||
prowlarrUrl
|
||||
baseUrl
|
||||
apiKeyFrom
|
||||
syncCategories
|
||||
;
|
||||
}) inst.syncedApps;
|
||||
};
|
||||
|
||||
mkInitScript =
|
||||
name: inst:
|
||||
pkgs.writeShellScript "${name}-init" ''
|
||||
set -euo pipefail
|
||||
pkgs.writeScript "${name}-init" ''
|
||||
#!${pythonEnv}/bin/python3
|
||||
"""Declarative API initialization for ${name}.
|
||||
|
||||
CONFIG_XML="${inst.dataDir}/config.xml"
|
||||
Uses pyarr (SonarrAPI) for standard Servarr CRUD operations and the
|
||||
requests library for Prowlarr-specific endpoints, health checks, and
|
||||
download-client creation (which needs the forceSave query parameter
|
||||
that pyarr does not expose).
|
||||
"""
|
||||
|
||||
if [ ! -f "$CONFIG_XML" ]; then
|
||||
echo "Config file $CONFIG_XML not found, skipping ${name} init"
|
||||
exit 0
|
||||
fi
|
||||
import json
|
||||
import os
|
||||
import re
|
||||
import sys
|
||||
import time
|
||||
|
||||
API_KEY=$(${grep} -oP '(?<=<ApiKey>)[^<]+' "$CONFIG_XML")
|
||||
BASE_URL="http://127.0.0.1:${builtins.toString inst.port}/api/${inst.apiVersion}"
|
||||
import requests as http
|
||||
from pyarr import SonarrAPI
|
||||
|
||||
# Wait for API to become available
|
||||
echo "Waiting for ${name} API (timeout: ${builtins.toString inst.apiTimeout}s)..."
|
||||
for i in $(seq 1 ${builtins.toString inst.apiTimeout}); do
|
||||
if ${curl} -sf --connect-timeout 5 "$BASE_URL/system/status" -H "X-Api-Key: $API_KEY" > /dev/null 2>&1; then
|
||||
echo "${name} API is ready"
|
||||
break
|
||||
fi
|
||||
if [ "$i" -eq ${builtins.toString inst.apiTimeout} ]; then
|
||||
echo "${name} API not available after ${builtins.toString inst.apiTimeout} seconds" >&2
|
||||
exit 1
|
||||
fi
|
||||
sleep 1
|
||||
done
|
||||
CONFIG = json.loads(${builtins.toJSON (mkInitConfig name inst)})
|
||||
|
||||
${lib.concatMapStringsSep "\n" mkDownloadClientSection inst.downloadClients}
|
||||
${lib.concatMapStringsSep "\n" mkRootFolderSection inst.rootFolders}
|
||||
${lib.concatMapStringsSep "\n" mkSyncedAppSection inst.syncedApps}
|
||||
# Map Servarr implementation names to their Newznab parent category names.
# Consulted by resolve_sync_categories to auto-detect category IDs from the
# Prowlarr /indexer/categories endpoint when none are configured explicitly.
IMPLEMENTATION_CATEGORY_MAP = {
    "Sonarr": "TV",
    "Radarr": "Movies",
    "Lidarr": "Audio",
    "Readarr": "Books",
    "Whisparr": "XXX",
}
|
||||
|
||||
${mkNamingSection inst}
|
||||
|
||||
${mkHealthCheckSection name inst}
|
||||
def read_api_key(config_xml_path):
    """Return the value of the <ApiKey> element in a Servarr config.xml.

    Raises ValueError when the file contains no <ApiKey> element.
    """
    with open(config_xml_path) as fh:
        text = fh.read()
    found = re.search(r"<ApiKey>([^<]+)</ApiKey>", text)
    if found is None:
        raise ValueError(f"Could not find ApiKey in {config_xml_path}")
    return found.group(1)
|
||||
|
||||
echo "${name} init complete"
|
||||
|
||||
def wait_for_api(base_url, api_key, timeout, name):
    """Block until the service answers system/status, or exit(1) on timeout.

    Polls once per second; connection errors and timeouts are treated the
    same as a not-yet-ready response.
    """
    print(f"Waiting for {name} API (timeout: {timeout}s)...")
    for attempt in range(1, timeout + 1):
        status = None
        try:
            status = http.get(
                f"{base_url}/system/status",
                headers={"X-Api-Key": api_key},
                timeout=5,
            )
        except (http.ConnectionError, http.Timeout):
            pass
        if status is not None and status.ok:
            print(f"{name} API is ready")
            return
        if attempt == timeout:
            print(
                f"{name} API not available after {timeout} seconds",
                file=sys.stderr,
            )
            sys.exit(1)
        time.sleep(1)
|
||||
|
||||
|
||||
def ensure_download_clients(client, base_url, api_key, download_clients):
    """Idempotently provision download clients.

    Listing goes through pyarr; creation uses requests directly because the
    forceSave query parameter is not exposed by pyarr.
    """
    known = {entry["name"] for entry in client.get_download_client()}

    for spec in download_clients:
        label = spec["name"]
        print(f"Checking download client '{label}'...")
        if label in known:
            print(f"Download client '{label}' already exists, skipping")
            continue

        print(f"Adding download client '{label}'...")
        body = {
            "enable": True,
            "protocol": spec["protocol"],
            "priority": 1,
            "name": label,
            "implementation": spec["implementation"],
            "configContract": spec["configContract"],
            # Flatten {key: value} into the [{name, value}] shape the API wants.
            "fields": [
                {"name": key, "value": val} for key, val in spec["fields"].items()
            ],
            "tags": [],
        }
        reply = http.post(
            f"{base_url}/downloadclient",
            headers={
                "X-Api-Key": api_key,
                "Content-Type": "application/json",
            },
            params={"forceSave": "true"},
            json=body,
            timeout=30,
        )
        reply.raise_for_status()
        print(f"Download client '{label}' added")
|
||||
|
||||
|
||||
def ensure_root_folders(client, root_folders):
    """Idempotently provision root folders via pyarr.

    Folders already present on the server are left untouched.
    """
    present = {entry["path"] for entry in client.get_root_folder()}

    for folder in root_folders:
        print(f"Checking root folder '{folder}'...")
        if folder in present:
            print(f"Root folder '{folder}' already exists, skipping")
        else:
            print(f"Adding root folder '{folder}'...")
            client.add_root_folder(folder)
            print(f"Root folder '{folder}' added")
|
||||
|
||||
|
||||
def resolve_sync_categories(base_url, api_key, implementation, explicit):
    """Resolve Newznab sync categories, auto-detecting from Prowlarr if needed.

    Explicitly configured IDs always win. Otherwise the Prowlarr category
    tree is queried and the parent category plus all of its subcategories
    for the implementation are collected; unknown implementations yield [].
    """
    if explicit:
        return explicit

    parent = IMPLEMENTATION_CATEGORY_MAP.get(implementation)
    if not parent:
        return []

    print(f"Auto-detecting sync categories for {implementation}...")
    reply = http.get(
        f"{base_url}/indexer/categories",
        headers={"X-Api-Key": api_key},
        timeout=30,
    )
    reply.raise_for_status()

    detected = []
    for category in reply.json():
        if category["name"] != parent:
            continue
        detected.append(category["id"])
        detected.extend(sub["id"] for sub in category.get("subCategories", []))

    if not detected:
        print(
            f"Warning: could not auto-detect categories for "
            f"'{parent}', using empty list",
            file=sys.stderr,
        )
        return []

    print(f"Resolved sync categories: {detected}")
    return detected
|
||||
|
||||
|
||||
def ensure_synced_apps(base_url, api_key, synced_apps):
    """Idempotently provision synced applications (Prowlarr).

    Uses requests directly because pyarr 5.x has no Prowlarr support.
    The target app's API key is read from its config.xml on disk.
    """
    listing = http.get(
        f"{base_url}/applications",
        headers={"X-Api-Key": api_key},
        timeout=30,
    )
    listing.raise_for_status()
    known = {entry["name"] for entry in listing.json()}

    for spec in synced_apps:
        label = spec["name"]
        print(f"Checking synced app '{label}'...")
        if label in known:
            print(f"Synced app '{label}' already exists, skipping")
            continue

        print(f"Adding synced app '{label}'...")
        target_key = read_api_key(spec["apiKeyFrom"])
        categories = resolve_sync_categories(
            base_url,
            api_key,
            spec["implementation"],
            spec.get("syncCategories", []),
        )

        body = {
            "name": label,
            "implementation": spec["implementation"],
            "configContract": spec["configContract"],
            "syncLevel": spec["syncLevel"],
            "fields": [
                {"name": "prowlarrUrl", "value": spec["prowlarrUrl"]},
                {"name": "baseUrl", "value": spec["baseUrl"]},
                {"name": "apiKey", "value": target_key},
                {"name": "syncCategories", "value": categories},
            ],
            "tags": [],
        }
        created = http.post(
            f"{base_url}/applications",
            headers={
                "X-Api-Key": api_key,
                "Content-Type": "application/json",
            },
            params={"forceSave": "true"},
            json=body,
            timeout=30,
        )
        created.raise_for_status()
        print(f"Synced app '{label}' added")
|
||||
|
||||
|
||||
def update_naming(client, naming_config):
    """Merge desired naming fields into the current config via pyarr.

    No-op when naming_config is empty or every desired field already
    matches the server's current value.
    """
    if not naming_config:
        return

    print("Checking naming configuration...")
    current = client.get_config_naming()

    if all(current.get(key) == want for key, want in naming_config.items()):
        print("Naming configuration already correct, skipping")
        return

    print("Updating naming configuration...")
    client.upd_config_naming({**current, **naming_config})
    print("Naming configuration updated")
|
||||
|
||||
|
||||
def health_check_loop(url, api_key, entity_name, svc_name, max_retries, interval):
    """POST to a testall endpoint with retry logic.

    Keeps retrying until every tested item reports isValid, or exits the
    process with status 1 once the attempt count exceeds max_retries.
    entity_name/svc_name are only used for log messages.
    """
    attempt = 0
    while True:
        healthy = True
        last_error = ""
        try:
            resp = http.post(
                url,
                headers={
                    "X-Api-Key": api_key,
                    "Content-Type": "application/json",
                },
                timeout=30,
            )
            result = resp.json()
            # Items without an isValid field are treated as healthy.
            failures = [
                item for item in result if not item.get("isValid", True)
            ]
            if failures:
                healthy = False
                last_error = "\n".join(
                    f" - ID {f['id']}: "
                    + ", ".join(
                        v["errorMessage"]
                        for v in f.get("validationFailures", [])
                    )
                    for f in failures
                )
        # Broad on purpose: connection errors, non-JSON bodies, and schema
        # surprises all count as "unreachable" and trigger a retry.
        except Exception:
            healthy = False
            last_error = (
                f"could not reach {svc_name} API for {entity_name} test"
            )

        if healthy:
            print(f"All {entity_name}s healthy")
            return

        attempt += 1
        # Strictly greater-than: max_retries means that many retries AFTER
        # the first failed attempt.
        if attempt > max_retries:
            print(
                f"Health check FAILED after {attempt} attempts: "
                f"{entity_name}(s) unreachable:",
                file=sys.stderr,
            )
            print(last_error, file=sys.stderr)
            sys.exit(1)

        print(
            f"{entity_name.capitalize()} health check failed "
            f"(attempt {attempt}/{max_retries}), "
            f"retrying in {interval}s..."
        )
        time.sleep(interval)
|
||||
|
||||
|
||||
def run_health_checks(base_url, api_key, name, cfg):
    """Run connectivity health checks if enabled.

    Download clients are tested before synced applications; each check
    uses the shared retry loop with the configured retry/interval values.
    """
    if not cfg["healthChecks"]:
        return

    print(f"Running {name} health checks...")
    retries = cfg["healthCheckRetries"]
    delay = cfg["healthCheckInterval"]

    checks = (
        (
            "downloadClients",
            f"{base_url}/downloadclient/testall",
            "download client",
            "Testing download client connectivity...",
        ),
        (
            "syncedApps",
            f"{base_url}/applications/testall",
            "synced application",
            "Testing synced application connectivity...",
        ),
    )
    for cfg_key, endpoint, entity, banner in checks:
        if cfg.get(cfg_key):
            print(banner)
            health_check_loop(endpoint, api_key, entity, name, retries, delay)

    print(f"{name} health checks passed")
|
||||
|
||||
|
||||
def main():
    """Provision this Servarr instance from the baked-in CONFIG blob."""
    name = CONFIG["name"]
    config_xml = f"{CONFIG['dataDir']}/config.xml"
    if not os.path.isfile(config_xml):
        print(f"Config file {config_xml} not found, skipping {name} init")
        return

    port = CONFIG["port"]
    api_version = CONFIG["apiVersion"]
    api_key = read_api_key(config_xml)
    base_url = f"http://127.0.0.1:{port}/api/{api_version}"

    wait_for_api(base_url, api_key, CONFIG["apiTimeout"], name)

    # pyarr client for standard Servarr operations (download clients,
    # root folders, naming). SonarrAPI is used generically here because
    # the relevant endpoints are identical across all Servarr applications
    # and it accepts a custom ver_uri for API version selection.
    client = SonarrAPI(
        f"http://127.0.0.1:{port}", api_key, ver_uri=f"/{api_version}"
    )

    if CONFIG.get("downloadClients"):
        ensure_download_clients(
            client, base_url, api_key, CONFIG["downloadClients"]
        )

    if CONFIG.get("rootFolders"):
        ensure_root_folders(client, CONFIG["rootFolders"])

    if CONFIG.get("syncedApps"):
        ensure_synced_apps(base_url, api_key, CONFIG["syncedApps"])

    if CONFIG.get("naming"):
        update_naming(client, CONFIG["naming"])

    run_health_checks(base_url, api_key, name, CONFIG)

    print(f"{name} init complete")
|
||||
|
||||
|
||||
if __name__ == "__main__":
|
||||
main()
|
||||
'';
|
||||
|
||||
# Get list of service names that syncedApps depend on
|
||||
@@ -613,65 +761,161 @@ let
|
||||
|
||||
enabledInstances = lib.filterAttrs (_: inst: inst.enable) cfg;
|
||||
|
||||
mkBazarrProviderSection =
|
||||
type: provider:
|
||||
let
|
||||
ltype = lib.toLower type;
|
||||
in
|
||||
''
|
||||
# ${type} provider
|
||||
echo "Checking ${type} provider..."
|
||||
PROVIDER_API_KEY=$(${grep} -oP '(?<=<ApiKey>)[^<]+' ${lib.escapeShellArg "${provider.dataDir}/config.xml"})
|
||||
EXISTING=$(${curl} -sf "$BASE_URL/api/system/settings" -H "X-API-KEY: $API_KEY")
|
||||
USE_FLAG=$(echo "$EXISTING" | ${jq} -r '.general.use_${ltype}')
|
||||
EXISTING_KEY=$(echo "$EXISTING" | ${jq} -r '.${ltype}.apikey // ""')
|
||||
if [ "$USE_FLAG" = "true" ] && [ -n "$EXISTING_KEY" ]; then
|
||||
echo "${type} provider already configured, skipping"
|
||||
else
|
||||
echo "Adding ${type} provider..."
|
||||
${curl} -sf -X POST "$BASE_URL/api/system/settings" \
|
||||
-H "X-API-KEY: $API_KEY" \
|
||||
-d "settings-general-use_${ltype}=true" \
|
||||
-d "settings-${ltype}-ip=localhost" \
|
||||
-d "settings-${ltype}-port=${builtins.toString provider.port}" \
|
||||
-d "settings-${ltype}-apikey=$PROVIDER_API_KEY" \
|
||||
-d "settings-${ltype}-ssl=false" \
|
||||
-d "settings-${ltype}-base_url=/"
|
||||
echo "${type} provider added"
|
||||
fi
|
||||
'';
|
||||
# JSON configuration blob for the Bazarr init script, baked in at Nix
# evaluation time. Only providers enabled in bazarrCfg are included.
mkBazarrInitConfig = builtins.toJSON {
  dataDir = bazarrCfg.dataDir;
  port = bazarrCfg.port;
  apiTimeout = bazarrCfg.apiTimeout;
  # Start from {} and merge each enabled provider so a disabled provider
  # leaves no key at all (the Python side checks providers.get(...)).
  providers =
    { }
    // lib.optionalAttrs bazarrCfg.sonarr.enable {
      sonarr = {
        enable = true;
        dataDir = bazarrCfg.sonarr.dataDir;
        port = bazarrCfg.sonarr.port;
      };
    }
    // lib.optionalAttrs bazarrCfg.radarr.enable {
      radarr = {
        enable = true;
        dataDir = bazarrCfg.radarr.dataDir;
        port = bazarrCfg.radarr.port;
      };
    };
};
|
||||
|
||||
mkBazarrInitScript = pkgs.writeShellScript "bazarr-init" ''
|
||||
set -euo pipefail
|
||||
mkBazarrInitScript = pkgs.writeScript "bazarr-init" ''
|
||||
#!${pythonEnv}/bin/python3
|
||||
"""Declarative API initialization for Bazarr provider connections.
|
||||
|
||||
CONFIG_YAML="${bazarrCfg.dataDir}/config/config.yaml"
|
||||
Uses the requests library directly since Bazarr has its own API that
|
||||
is not compatible with the Servarr/pyarr ecosystem.
|
||||
"""
|
||||
|
||||
if [ ! -f "$CONFIG_YAML" ]; then
|
||||
echo "Config file $CONFIG_YAML not found, skipping bazarr init"
|
||||
exit 0
|
||||
fi
|
||||
import json
|
||||
import os
|
||||
import re
|
||||
import sys
|
||||
import time
|
||||
|
||||
API_KEY=$(${awk} '/^auth:/{f=1} f && /apikey:/{gsub(/.*apikey: /, ""); print; exit}' "$CONFIG_YAML")
|
||||
BASE_URL="http://127.0.0.1:${builtins.toString bazarrCfg.port}"
|
||||
import requests as http
|
||||
|
||||
# Wait for API to become available
|
||||
echo "Waiting for Bazarr API (timeout: ${builtins.toString bazarrCfg.apiTimeout}s)..."
|
||||
for i in $(seq 1 ${builtins.toString bazarrCfg.apiTimeout}); do
|
||||
if ${curl} -sf --connect-timeout 5 "$BASE_URL/api/system/status" -H "X-API-KEY: $API_KEY" > /dev/null 2>&1; then
|
||||
echo "Bazarr API is ready"
|
||||
break
|
||||
fi
|
||||
if [ "$i" -eq ${builtins.toString bazarrCfg.apiTimeout} ]; then
|
||||
echo "Bazarr API not available after ${builtins.toString bazarrCfg.apiTimeout} seconds" >&2
|
||||
exit 1
|
||||
fi
|
||||
sleep 1
|
||||
done
|
||||
CONFIG = json.loads(${builtins.toJSON mkBazarrInitConfig})
|
||||
|
||||
${lib.optionalString bazarrCfg.sonarr.enable (mkBazarrProviderSection "Sonarr" bazarrCfg.sonarr)}
|
||||
${lib.optionalString bazarrCfg.radarr.enable (mkBazarrProviderSection "Radarr" bazarrCfg.radarr)}
|
||||
|
||||
echo "Bazarr init complete"
|
||||
def read_api_key_yaml(config_yaml_path):
    """Extract the apikey from Bazarr's config.yaml (auth section).

    Scans line by line: once an "auth:" line is seen, the first following
    line containing "apikey:" supplies the key. Raises ValueError when no
    key is found.
    """
    seen_auth = False
    with open(config_yaml_path) as fh:
        for raw in fh:
            if raw.strip().startswith("auth:"):
                seen_auth = True
            elif seen_auth and "apikey:" in raw:
                return raw.split("apikey:")[-1].strip()
    raise ValueError(f"Could not find apikey in {config_yaml_path}")
|
||||
|
||||
|
||||
def read_api_key_xml(config_xml_path):
    """Return the value of the <ApiKey> element in a Servarr config.xml.

    Raises ValueError when the file contains no <ApiKey> element.
    """
    with open(config_xml_path) as fh:
        text = fh.read()
    found = re.search(r"<ApiKey>([^<]+)</ApiKey>", text)
    if found is None:
        raise ValueError(f"Could not find ApiKey in {config_xml_path}")
    return found.group(1)
|
||||
|
||||
|
||||
def wait_for_api(base_url, api_key, timeout):
    """Poll Bazarr's system/status endpoint until available, or exit(1).

    Polls once per second; connection errors and timeouts count as
    not-ready.
    """
    print(f"Waiting for Bazarr API (timeout: {timeout}s)...")
    for attempt in range(1, timeout + 1):
        status = None
        try:
            status = http.get(
                f"{base_url}/api/system/status",
                headers={"X-API-KEY": api_key},
                timeout=5,
            )
        except (http.ConnectionError, http.Timeout):
            pass
        if status is not None and status.ok:
            print("Bazarr API is ready")
            return
        if attempt == timeout:
            print(
                f"Bazarr API not available after {timeout} seconds",
                file=sys.stderr,
            )
            sys.exit(1)
        time.sleep(1)
|
||||
|
||||
|
||||
def configure_provider(base_url, api_key, provider_type, provider_config):
    """Idempotently configure a Sonarr/Radarr provider in Bazarr.

    Skips when the provider is already enabled with a stored API key;
    otherwise reads the provider's API key from its config.xml and POSTs
    the form-encoded settings Bazarr expects.
    """
    ltype = provider_type.lower()
    print(f"Checking {provider_type} provider...")

    settings_resp = http.get(
        f"{base_url}/api/system/settings",
        headers={"X-API-KEY": api_key},
        timeout=30,
    )
    settings_resp.raise_for_status()
    settings = settings_resp.json()

    already_enabled = settings.get("general", {}).get(f"use_{ltype}", False)
    stored_key = settings.get(ltype, {}).get("apikey", "")
    if already_enabled and stored_key:
        print(f"{provider_type} provider already configured, skipping")
        return

    print(f"Adding {provider_type} provider...")
    provider_api_key = read_api_key_xml(
        f"{provider_config['dataDir']}/config.xml"
    )

    form = {
        f"settings-general-use_{ltype}": "true",
        f"settings-{ltype}-ip": "localhost",
        f"settings-{ltype}-port": str(provider_config["port"]),
        f"settings-{ltype}-apikey": provider_api_key,
        f"settings-{ltype}-ssl": "false",
        f"settings-{ltype}-base_url": "/",
    }
    save_resp = http.post(
        f"{base_url}/api/system/settings",
        headers={"X-API-KEY": api_key},
        data=form,
        timeout=30,
    )
    save_resp.raise_for_status()
    print(f"{provider_type} provider added")
|
||||
|
||||
|
||||
def main():
    """Wire Bazarr up to its enabled Sonarr/Radarr providers from CONFIG."""
    config_yaml = f"{CONFIG['dataDir']}/config/config.yaml"
    if not os.path.isfile(config_yaml):
        print(f"Config file {config_yaml} not found, skipping bazarr init")
        return

    api_key = read_api_key_yaml(config_yaml)
    base_url = f"http://127.0.0.1:{CONFIG['port']}"

    wait_for_api(base_url, api_key, CONFIG["apiTimeout"])

    providers = CONFIG.get("providers", {})
    # Sonarr first, then Radarr — same order as the original shell script.
    for provider_type in ("Sonarr", "Radarr"):
        spec = providers.get(provider_type.lower(), {})
        if spec.get("enable"):
            configure_provider(base_url, api_key, provider_type, spec)

    print("Bazarr init complete")
|
||||
|
||||
|
||||
if __name__ == "__main__":
|
||||
main()
|
||||
'';
|
||||
|
||||
bazarrDeps = [
|
||||
|
||||
Reference in New Issue
Block a user