llama-cpp: re-add w/ turboquant
All checks were successful
Build and Deploy / deploy (push) Successful in 28m52s

This commit is contained in:
2026-04-02 13:42:39 -04:00
parent 7e779ca0f7
commit f342521d46
5 changed files with 104 additions and 0 deletions

View File

@@ -47,6 +47,8 @@
./services/soulseek.nix
./services/llama-cpp.nix
./services/ups.nix
./services/monitoring.nix
./services/jellyfin-annotations.nix

53
flake.lock generated
View File

@@ -150,6 +150,24 @@
"type": "github"
}
},
"flake-parts": {
"inputs": {
"nixpkgs-lib": "nixpkgs-lib"
},
"locked": {
"lastModified": 1730504689,
"narHash": "sha256-hgmguH29K2fvs9szpq2r3pz2/8cJd2LPS+b4tfNFCwE=",
"owner": "hercules-ci",
"repo": "flake-parts",
"rev": "506278e768c2a08bec68eb62932193e341f55c90",
"type": "github"
},
"original": {
"owner": "hercules-ci",
"repo": "flake-parts",
"type": "github"
}
},
"flake-utils": {
"inputs": {
"systems": "systems_4"
@@ -276,6 +294,28 @@
"type": "github"
}
},
"llamacpp": {
"inputs": {
"flake-parts": "flake-parts",
"nixpkgs": [
"nixpkgs"
]
},
"locked": {
"lastModified": 1775101360,
"narHash": "sha256-X1cyWED8lmsGKFc7Pb6nGJ8EVzpPqi5iKcyL8NVVIe8=",
"owner": "TheTom",
"repo": "llama-cpp-turboquant",
"rev": "04eeabb0d344b54ca12d4140b8af8c236ffe7beb",
"type": "github"
},
"original": {
"owner": "TheTom",
"ref": "feature/turboquant-kv-cache",
"repo": "llama-cpp-turboquant",
"type": "github"
}
},
"nix-minecraft": {
"inputs": {
"flake-compat": "flake-compat_3",
@@ -330,6 +370,18 @@
"type": "github"
}
},
"nixpkgs-lib": {
"locked": {
"lastModified": 1730504152,
"narHash": "sha256-lXvH/vOfb4aGYyvFmZK/HlsNsr/0CVWlwYvo2rxJk3s=",
"type": "tarball",
"url": "https://github.com/NixOS/nixpkgs/archive/cc2f28000298e1269cea6612cd06ec9979dd5d7f.tar.gz"
},
"original": {
"type": "tarball",
"url": "https://github.com/NixOS/nixpkgs/archive/cc2f28000298e1269cea6612cd06ec9979dd5d7f.tar.gz"
}
},
"nixpkgs-p2pool-module": {
"flake": false,
"locked": {
@@ -395,6 +447,7 @@
"home-manager": "home-manager",
"impermanence": "impermanence",
"lanzaboote": "lanzaboote",
"llamacpp": "llamacpp",
"nix-minecraft": "nix-minecraft",
"nixos-hardware": "nixos-hardware",
"nixpkgs": "nixpkgs",

View File

@@ -28,6 +28,11 @@
inputs.nixpkgs.follows = "nixpkgs";
};
llamacpp = {
url = "github:TheTom/llama-cpp-turboquant/feature/turboquant-kv-cache";
inputs.nixpkgs.follows = "nixpkgs";
};
srvos = {
url = "github:nix-community/srvos";
inputs.nixpkgs.follows = "nixpkgs";

View File

@@ -169,6 +169,10 @@ rec {
port = 9162;
proto = "tcp";
};
llama_cpp = {
port = 6688;
proto = "tcp";
};
};
};

40
services/llama-cpp.nix Normal file
View File

@@ -0,0 +1,40 @@
# NixOS module: llama.cpp inference server built from the
# TheTom/llama-cpp-turboquant flake input, fronted by Caddy with
# auth at llm.<domain>.
{
  pkgs,
  service_configs,
  config,
  inputs,
  lib,
  ...
}:
{
  services.llama-cpp = {
    enable = true;
    # Fetch the GGUF model into the Nix store; toString yields the
    # resulting store path for the module's `model` option.
    model = toString (
      pkgs.fetchurl {
        url = "https://huggingface.co/Jackrong/Qwen3.5-9B-Claude-4.6-Opus-Reasoning-Distilled-v2-GGUF/resolve/main/Qwen3.5-9B.Q4_K_M.gguf";
        # Plain hex sha256 (non-SRI form, accepted by fetchurl).
        sha256 = "8fbbc7b04a7d4b052d14b7aa97c8bf2014d39ceca8c2baaa043711712ba71ccc";
      }
    );
    port = service_configs.ports.private.llama_cpp.port;
    # Bind all interfaces; external access goes through the authed
    # Caddy virtual host below.
    host = "0.0.0.0";
    # Vulkan-enabled package from the turboquant fork flake.
    # NOTE(review): `lib.optimizePackage` is not a nixpkgs lib function —
    # presumably a project-local lib extension; confirm it is in scope.
    package = (lib.optimizePackage inputs.llamacpp.packages.${pkgs.system}.vulkan);
    extraFlags = [
      # Offload 12 layers to the GPU.
      "-ngl"
      "12"
      # 16k-token context window.
      "-c"
      "16384"
      # KV-cache key type: 8-bit quantized.
      "-ctk"
      "q8_0"
      # KV-cache value type: "turbo3" — presumably a quant type specific
      # to the turboquant fork (not in upstream llama.cpp); TODO confirm.
      "-ctv"
      "turbo3"
    ];
  };
  # have to do this in order to get vulkan to work
  systemd.services.llama-cpp.serviceConfig.DynamicUser = lib.mkForce false;
  # Reverse-proxy the server behind Caddy; the imported secret file
  # supplies the auth directives for this virtual host.
  services.caddy.virtualHosts."llm.${service_configs.https.domain}".extraConfig = ''
    import ${config.age.secrets.caddy_auth.path}
    reverse_proxy :${toString config.services.llama-cpp.port}
  '';
}