Play around with debug logging to figure out CI failures

This commit is contained in:
2026-01-11 20:17:42 -05:00
parent af447da042
commit 05526b06d6
3 changed files with 60 additions and 5 deletions

View File

@@ -30,7 +30,8 @@ steps:
from_secret: OLLAMA_API_URL from_secret: OLLAMA_API_URL
OLLAMA_API_KEY: OLLAMA_API_KEY:
from_secret: OLLAMA_API_KEY from_secret: OLLAMA_API_KEY
OLLAMA_MODEL: gemma3:4b OLLAMA_MODEL:
from_secret: OLLAMA_MODEL
COMFYUI_URL: COMFYUI_URL:
from_secret: COMFYUI_URL from_secret: COMFYUI_URL
commands: commands:

View File

@@ -2,7 +2,7 @@ import "dotenv/config";
import { generateDungeon } from "./dungeonGenerator.js"; import { generateDungeon } from "./dungeonGenerator.js";
import { generateDungeonImages } from "./imageGenerator.js"; import { generateDungeonImages } from "./imageGenerator.js";
import { generatePDF } from "./generatePDF.js"; import { generatePDF } from "./generatePDF.js";
import { OLLAMA_MODEL } from "./ollamaClient.js"; import { OLLAMA_MODEL, listOpenWebUIModels } from "./ollamaClient.js";
// Utility to create a filesystem-safe filename from the dungeon title // Utility to create a filesystem-safe filename from the dungeon title
function slugify(text) { function slugify(text) {
@@ -20,6 +20,11 @@ function slugify(text) {
console.log("Using Ollama API URL:", process.env.OLLAMA_API_URL); console.log("Using Ollama API URL:", process.env.OLLAMA_API_URL);
console.log("Using Ollama model:", OLLAMA_MODEL); console.log("Using Ollama model:", OLLAMA_MODEL);
// Try to list available models if using Open WebUI
if (process.env.OLLAMA_API_URL?.includes("/api/chat/completions")) {
await listOpenWebUIModels();
}
// Generate the dungeon data // Generate the dungeon data
const dungeonData = await generateDungeon(); const dungeonData = await generateDungeon();

View File

@@ -47,6 +47,13 @@ async function callOllamaBase(prompt, model, retries, stepName, apiType) {
? { model, messages: [{ role: "user", content: prompt }] } ? { model, messages: [{ role: "user", content: prompt }] }
: { model, prompt, stream: false }; : { model, prompt, stream: false };
// Debug logging for Open WebUI
if (isUsingOpenWebUI) {
console.log(`[${stepName}] Using Open WebUI API`);
console.log(`[${stepName}] Model name: "${model}"`);
console.log(`[${stepName}] API URL: ${OLLAMA_API_URL}`);
}
const response = await fetch(OLLAMA_API_URL, { const response = await fetch(OLLAMA_API_URL, {
method: "POST", method: "POST",
headers, headers,
@@ -61,9 +68,14 @@ async function callOllamaBase(prompt, model, retries, stepName, apiType) {
} catch { } catch {
// Ignore errors reading error response // Ignore errors reading error response
} }
throw new Error( const errorMsg = `Ollama request failed: ${response.status} ${response.statusText}${errorDetails}`;
`Ollama request failed: ${response.status} ${response.statusText}${errorDetails}`, if (isUsingOpenWebUI) {
); console.error(`[${stepName}] Request details:`);
console.error(` URL: ${OLLAMA_API_URL}`);
console.error(` Model: "${model}"`);
console.error(` Body: ${JSON.stringify(body, null, 2)}`);
}
throw new Error(errorMsg);
} }
const data = await response.json(); const data = await response.json();
@@ -110,3 +122,40 @@ export async function callOllamaExplicit(
) { ) {
return callOllamaBase(prompt, model, retries, stepName, apiType); return callOllamaBase(prompt, model, retries, stepName, apiType);
} }
// List the models available from an Open WebUI server (best-effort; never throws).
// Returns the parsed JSON payload on success, or null when the configured URL is
// not an Open WebUI chat-completions endpoint, the request fails, or the server
// responds with a non-2xx status.
export async function listOpenWebUIModels() {
  const isOpenWebUI = Boolean(
    OLLAMA_API_URL && OLLAMA_API_URL.includes("/api/chat/completions"),
  );
  if (!isOpenWebUI) {
    console.log("Not using Open WebUI API, skipping model list");
    return null;
  }

  try {
    // The Authorization header is optional; attach it only when a key is configured.
    const requestHeaders = { "Content-Type": "application/json" };
    if (OLLAMA_API_KEY) {
      requestHeaders["Authorization"] = `Bearer ${OLLAMA_API_KEY}`;
    }

    // Derive the server root from the chat-completions URL, then query the
    // /api/v1/models endpoint that Open WebUI is expected to expose.
    const serverRoot = OLLAMA_API_URL.replace("/api/chat/completions", "");
    const modelsEndpoint = `${serverRoot}/api/v1/models`;

    const res = await fetch(modelsEndpoint, {
      method: "GET",
      headers: requestHeaders,
    });

    if (!res.ok) {
      console.log(`Could not list models: ${res.status} ${res.statusText}`);
      return null;
    }

    const payload = await res.json();
    console.log("Available models from Open WebUI:", JSON.stringify(payload, null, 2));
    return payload;
  } catch (error) {
    // Diagnostics are best-effort — a listing failure must never break the run.
    console.log(`Error listing models: ${error.message}`);
    return null;
  }
}