diff --git a/services/llama-cpp/llama-cpp.nix b/services/llama-cpp/llama-cpp.nix
index 72d8725..f5e64cd 100644
--- a/services/llama-cpp/llama-cpp.nix
+++ b/services/llama-cpp/llama-cpp.nix
@@ -46,6 +46,11 @@ in
   # have to do this in order to get vulkan to work
   systemd.services.llama-cpp.serviceConfig.DynamicUser = lib.mkForce false;
 
+  # llama-server tries to create ~/.cache; ProtectSystem=strict + impermanent
+  # root make /root read-only. Give it a writable cache dir and point HOME there.
+  systemd.services.llama-cpp.serviceConfig.CacheDirectory = "llama-cpp";
+  systemd.services.llama-cpp.environment.HOME = "/var/cache/llama-cpp";
+
   # upstream module hardcodes --log-disable; override ExecStart to keep logs
   # so we can see prompt processing progress via journalctl
   systemd.services.llama-cpp.serviceConfig.ExecStart = lib.mkForce (