Add capability to use a default model when one is provided via the API
All checks were successful
ci/woodpecker/cron/ci Pipeline was successful

This commit is contained in:
2026-01-16 22:18:31 -05:00
parent 3b91ce3068
commit c54b1a6082
2 changed files with 30 additions and 2 deletions

View File

@@ -2,7 +2,7 @@ import "dotenv/config";
import { generateDungeon } from "./dungeonGenerator.js";
import { generateDungeonImages } from "./imageGenerator.js";
import { generatePDF } from "./generatePDF.js";
import { OLLAMA_MODEL, initializeModel } from "./ollamaClient.js";

// Utility to create a filesystem-safe filename from the dungeon title
function slugify(text) {
@@ -18,6 +18,9 @@ function slugify(text) {
throw new Error("OLLAMA_API_URL environment variable is required");
}
console.log("Using Ollama API URL:", process.env.OLLAMA_API_URL);
// Initialize model (will fetch default from API or use fallback)
await initializeModel();
console.log("Using Ollama model:", OLLAMA_MODEL);
// Generate the dungeon data

View File

@@ -1,6 +1,31 @@
const OLLAMA_API_URL = process.env.OLLAMA_API_URL;
const OLLAMA_API_KEY = process.env.OLLAMA_API_KEY;
export let OLLAMA_MODEL = process.env.OLLAMA_MODEL || "gemma3:latest";
export async function initializeModel() {
if (process.env.OLLAMA_MODEL) return;
try {
const isOpenWebUI = OLLAMA_API_URL?.includes("/api/chat/completions");
const baseUrl = OLLAMA_API_URL?.replace(/\/api\/.*$/, "");
const url = isOpenWebUI ? `${baseUrl}/api/v1/models` : `${baseUrl}/api/tags`;
const headers = isOpenWebUI && OLLAMA_API_KEY
? { "Authorization": `Bearer ${OLLAMA_API_KEY}` }
: {};
const res = await fetch(url, { headers });
if (res.ok) {
const data = await res.json();
const model = isOpenWebUI
? data.data?.[0]?.id || data.data?.[0]?.name
: data.models?.[0]?.name;
if (model) {
OLLAMA_MODEL = model;
console.log(`Using default model: ${model}`);
}
}
} catch (err) {
console.warn(`Could not fetch default model, using: ${OLLAMA_MODEL}`);
}
}
function cleanText(str) {
return str