cleanup and fix ci

2026-01-10 21:52:11 -05:00
parent 1059eced53
commit c48188792d
5 changed files with 27 additions and 18 deletions

@@ -12,7 +12,10 @@ function cleanText(str) {
 }
 function inferApiType(url) {
-  return url?.includes("/api/chat/completions") ? "open-webui" : "direct";
+  if (!url) return "ollama-generate";
+  if (url.includes("/api/chat/completions")) return "open-webui";
+  if (url.includes("/api/chat")) return "ollama-chat";
+  return "ollama-generate";
 }
 async function sleep(ms) {
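The rewritten inferApiType distinguishes three backends instead of two, checking the more specific path first. As a rough illustration (the URLs below are made-up examples, not values from the repo), the mapping works out to:

// Illustrative only: example base URLs assumed for each backend style.
inferApiType("http://localhost:3000/api/chat/completions"); // "open-webui"
inferApiType("http://localhost:11434/api/chat");            // "ollama-chat"
inferApiType("http://localhost:11434/api/generate");        // "ollama-generate"
inferApiType(undefined);                                     // "ollama-generate" (no URL falls back to generate)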
@@ -21,6 +24,7 @@ async function sleep(ms) {
 async function callOllamaBase(prompt, model, retries, stepName, apiType) {
   const isUsingOpenWebUI = apiType === "open-webui";
+  const isUsingOllamaChat = apiType === "ollama-chat";
   for (let attempt = 1; attempt <= retries; attempt++) {
     try {
@@ -39,7 +43,7 @@ async function callOllamaBase(prompt, model, retries, stepName, apiType) {
         headers["Authorization"] = `Bearer ${OLLAMA_API_KEY}`;
       }
-      const body = isUsingOpenWebUI
+      const body = isUsingOpenWebUI || isUsingOllamaChat
         ? { model, messages: [{ role: "user", content: prompt }] }
         : { model, prompt, stream: false };
@@ -49,15 +53,25 @@ async function callOllamaBase(prompt, model, retries, stepName, apiType) {
         body: JSON.stringify(body),
       });
-      if (!response.ok)
+      if (!response.ok) {
+        let errorDetails = "";
+        try {
+          const errorData = await response.text();
+          errorDetails = errorData ? `: ${errorData}` : "";
+        } catch {
+          // Ignore errors reading error response
+        }
         throw new Error(
-          `Ollama request failed: ${response.status} ${response.statusText}`,
+          `Ollama request failed: ${response.status} ${response.statusText}${errorDetails}`,
         );
+      }
       const data = await response.json();
       const rawText = isUsingOpenWebUI
         ? data.choices?.[0]?.message?.content
-        : data.response;
+        : isUsingOllamaChat
+          ? data.message?.content
+          : data.response;
       if (!rawText) throw new Error("No response from Ollama");
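Response parsing now has three branches. A minimal sketch of the same field selection as a standalone helper (hypothetical, not part of the patch), assuming the field names shown in the diff:

// Hypothetical helper mirroring the branch logic above.
function extractText(apiType, data) {
  if (apiType === "open-webui") return data.choices?.[0]?.message?.content; // OpenAI-style chat completions schema
  if (apiType === "ollama-chat") return data.message?.content;              // Ollama /api/chat schema
  return data.response;                                                     // Ollama /api/generate schema
}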
@@ -92,7 +106,7 @@ export async function callOllamaExplicit(
   model = OLLAMA_MODEL,
   retries = 5,
   stepName = "unknown",
-  apiType = "direct",
+  apiType = "ollama-generate",
 ) {
   return callOllamaBase(prompt, model, retries, stepName, apiType);
 }
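With the default renamed from "direct" to "ollama-generate", call sites that omit apiType resolve to the generate-style request. A hypothetical usage sketch (the prompts and step name are placeholders), assuming the exported signature above and an async context:

// Defaults: OLLAMA_MODEL, 5 retries, stepName "unknown", apiType "ollama-generate".
await callOllamaExplicit("Summarize the latest run.");
// Explicitly target an Open WebUI-style endpoint instead:
await callOllamaExplicit("Summarize the latest run.", OLLAMA_MODEL, 5, "summary", "open-webui");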