server-config/services/llama-cpp/llama-cpp.nix

{
  pkgs,
  service_configs,
  config,
  inputs,
  lib,
  ...
}:
{
  services.llama-cpp = {
    enable = true;
    # Pin the GGUF weights by hash: fetchurl puts the file in the Nix store,
    # and toString hands the module a plain store path.
    model = toString (
      pkgs.fetchurl {
        url = "https://huggingface.co/unsloth/gemma-4-E4B-it-GGUF/resolve/main/gemma-4-E4B-it-Q4_K_M.gguf";
        sha256 = "ced37f54b80068fe65e95c6dd79ac88cddc227e179fd1040b8f751b1e5bdf849";
      }
    );
    port = service_configs.ports.private.llama_cpp.port;
    host = "0.0.0.0";
    package = lib.optimizePackage inputs.llamacpp.packages.${pkgs.system}.default;
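    # lib.optimizePackage is not stock nixpkgs, so it presumably comes from a
    # custom lib extension in this flake. A hypothetical shape, purely
    # illustrative (the attribute names and compiler flag are assumptions,
    # not taken from this repo):
    # optimizePackage = pkg: pkg.overrideAttrs (old: {
    #   env.NIX_CFLAGS_COMPILE = (old.env.NIX_CFLAGS_COMPILE or "") + " -march=native";
    # });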
    extraFlags = [
      # "-ngl"
      # "12"
      # 32k-token context window
      "-c"
      "32768"
      # quantize both halves of the KV cache to q8_0 to save VRAM
      "-ctk"
      "q8_0"
      "-ctv"
      "q8_0"
      # flash attention
      "-fa"
      "on"
      # API key read from an agenix secret (declaration sketched below)
      "--api-key-file"
      config.age.secrets.llama-cpp-api-key.path
    ];
  };
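
  # --api-key-file above points at what appears to be an agenix secret. A
  # minimal sketch of the declaration this assumes exists elsewhere in the
  # config (file path and mode are illustrative, not from this repo):
  # age.secrets.llama-cpp-api-key = {
  #   file = ../../secrets/llama-cpp-api-key.age;
  #   mode = "0400";
  # };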
  # Have to disable DynamicUser in order to get Vulkan to work.
  systemd.services.llama-cpp.serviceConfig.DynamicUser = lib.mkForce false;
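
  # With DynamicUser forced off and no explicit User set, the service likely
  # runs as root. A hypothetical hardening sketch, assuming the "video" and
  # "render" groups gate GPU device access on this host (none of this is in
  # the original config):
  # users.users.llama-cpp = {
  #   isSystemUser = true;
  #   group = "llama-cpp";
  #   extraGroups = [ "video" "render" ];
  # };
  # users.groups.llama-cpp = { };
  # systemd.services.llama-cpp.serviceConfig.User = "llama-cpp";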
  # Auth is handled by llama-cpp's --api-key-file (Bearer token).
  # No caddy_auth: the API key is the auth layer, and caddy_auth's basic
  # auth would block Bearer-only clients like oh-my-pi.
  services.caddy.virtualHosts."llm.${service_configs.https.domain}".extraConfig = ''
    reverse_proxy :${toString config.services.llama-cpp.port}
  '';
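
  # Illustrative client call: llama-server checks the Authorization header
  # against the key file, and /v1/models is one of its OpenAI-compatible
  # endpoints (the hostname below is a placeholder for llm.<domain>):
  #   curl https://llm.example.com/v1/models \
  #     -H "Authorization: Bearer $YOUR_API_KEY"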
}