{ pkgs, service_configs, config, inputs, lib, ... }: {
  # llama.cpp inference server serving a quantized Qwen model over an
  # OpenAI-compatible HTTP API, fronted by Caddy (TLS) below.
  services.llama-cpp = {
    enable = true;

    # Fetch the GGUF weights into the Nix store; the module option expects a
    # path string, hence toString on the fetchurl derivation.
    model = toString (pkgs.fetchurl {
      url = "https://huggingface.co/unsloth/Qwen3.5-9B-GGUF/resolve/main/Qwen3.5-9B-Q4_K_M.gguf";
      sha256 = "03b74727a860a56338e042c4420bb3f04b2fec5734175f4cb9fa853daf52b7e8";
    });

    port = service_configs.ports.private.llama_cpp.port;
    # Bind on all interfaces; external access is still gated by Caddy + the
    # API key file passed below.
    host = "0.0.0.0";

    # Use the upstream flake's llama.cpp build, run through the
    # project-defined lib.optimizePackage wrapper (defined elsewhere).
    package = lib.optimizePackage inputs.llamacpp.packages.${pkgs.system}.default;

    extraFlags = [
      # "-ngl"
      # "12"
      "-c" "32768"   # context window (tokens)
      "-ctk" "q8_0"  # KV-cache key quantization
      # FIX: "turbo4" is not a valid cache type — -ctv/--cache-type-v takes a
      # ggml quantization type (f16, q8_0, q4_0, ...); llama-server rejects
      # unknown values at startup. Quantize V the same as K.
      "-ctv" "q8_0"
      "-fa" "on"     # flash attention
      "--api-key-file" config.age.secrets.llama-cpp-api-key.path
    ];
  };

  # have to do this in order to get vulkan to work
  systemd.services.llama-cpp.serviceConfig.DynamicUser = lib.mkForce false;

  # Auth handled by llama-cpp --api-key-file (Bearer token).
  # No caddy_auth — the API key is the auth layer, and caddy_auth's basic
  # auth would block Bearer-only clients like oh-my-pi.
  services.caddy.virtualHosts."llm.${service_configs.https.domain}".extraConfig = ''
    reverse_proxy :${toString config.services.llama-cpp.port}
  '';
}