pi: fix llama.cpp provider discovery with auth
Some checks failed
Build and Deploy Desktop / deploy (push) Failing after 4s

Add api, authHeader, and discovery.type fields so omp can
discover models via GET /v1/models with the Bearer token.
This commit is contained in:
2026-04-02 18:14:09 -04:00
parent 3e35fea183
commit 5e9e6bcd40

View File

@@ -28,6 +28,9 @@ let
 "llama.cpp" = {
   baseUrl = "https://llm.sigkill.computer";
   apiKey = lib.strings.trim (builtins.readFile ../secrets/llama_cpp_api_key);
+  api = "openai-responses";
+  authHeader = true;
+  discovery.type = "llama.cpp";
 };
};
};