llama-cpp: patch for vulkan support instead
This commit is contained in:
@@ -23,7 +23,11 @@ in
   );
   port = service_configs.ports.private.llama_cpp.port;
   host = "0.0.0.0";
-  package = (lib.optimizePackage inputs.llamacpp.packages.${pkgs.system}.vulkan);
+  package = lib.optimizePackage (
+    inputs.llamacpp.packages.${pkgs.system}.vulkan.overrideAttrs (old: {
+      patches = (old.patches or [ ]) ++ [ ../patches/0002-llamacpp-vulkan-turbo3.patch ];
+    })
+  );
   extraFlags = [
     "-ngl"
     "999"
Reference in New Issue
Block a user