llama-cpp: fix vulkan cache
Some checks failed
Build and Deploy / deploy (push) Failing after 1m23s

This commit is contained in:
2026-04-06 02:23:29 -04:00
parent 0a927ea893
commit df04e36b41

View File

@@ -46,6 +46,11 @@ in
# have to do this in order to get vulkan to work
systemd.services.llama-cpp.serviceConfig.DynamicUser = lib.mkForce false;
# llama-server tries to create ~/.cache; ProtectSystem=strict + impermanent
# root make /root read-only. Give it a writable cache dir and point HOME there.
systemd.services.llama-cpp.serviceConfig.CacheDirectory = "llama-cpp";
systemd.services.llama-cpp.environment.HOME = "/var/cache/llama-cpp";
# upstream module hardcodes --log-disable; override ExecStart to keep logs
# so we can see prompt processing progress via journalctl
systemd.services.llama-cpp.serviceConfig.ExecStart = lib.mkForce (