Add offline monthly insights with Ollama

This commit is contained in:
2026-03-23 14:12:35 -04:00
parent 696d393fca
commit a745c0ca1e
13 changed files with 415 additions and 60 deletions

46
src/lib/ollama.ts Normal file
View File

@@ -0,0 +1,46 @@
/**
 * Error thrown when the local Ollama runtime cannot be reached or does not
 * yield a usable response. Callers catch this type to degrade gracefully
 * when offline AI features are unavailable.
 */
export class OllamaUnavailableError extends Error {
  // Set as a class field rather than in the constructor body; the observable
  // result (an own `name` property on the instance) is identical.
  name = "OllamaUnavailableError";

  constructor(message = "Local AI runtime is unavailable.") {
    super(message);
  }
}
/** Arguments accepted by {@link generateOllamaJson}. */
type GenerateJsonInput = {
  /** Full prompt text sent to the model in a single non-streaming request. */
  prompt: string;
  /** Optional model name; when omitted, `OLLAMA_MODEL` env var is used, then a built-in default. */
  model?: string;
};
/**
 * Sends a prompt to a local Ollama instance and returns the model's reply
 * parsed as JSON.
 *
 * Configuration is read from the environment:
 * - `OLLAMA_URL`   — base URL of the Ollama server (default `http://127.0.0.1:11434`)
 * - `OLLAMA_MODEL` — default model name when `model` is not supplied
 *
 * @param prompt Prompt text sent to the `/api/generate` endpoint.
 * @param model  Optional model override; falls back to `OLLAMA_MODEL`, then `qwen3.5:9b`.
 * @returns The parsed JSON payload. NOTE: the value is cast to `T` without
 *   runtime schema validation — callers must tolerate shape mismatches.
 * @throws OllamaUnavailableError when the server is unreachable, responds with
 *   a non-2xx status, returns an empty body, or returns unparseable JSON.
 */
export async function generateOllamaJson<T>({ prompt, model }: GenerateJsonInput): Promise<T> {
  // Strip one trailing slash so the path concatenation below stays well-formed.
  const baseUrl = (process.env.OLLAMA_URL ?? "http://127.0.0.1:11434").replace(/\/$/, "");
  const selectedModel = model ?? process.env.OLLAMA_MODEL ?? "qwen3.5:9b";

  let response: Response;
  try {
    response = await fetch(`${baseUrl}/api/generate`, {
      method: "POST",
      headers: { "Content-Type": "application/json" },
      body: JSON.stringify({
        model: selectedModel,
        format: "json", // ask Ollama to constrain output to valid JSON
        stream: false, // request one complete response instead of a token stream
        prompt,
      }),
    });
  } catch {
    // fetch rejects on network-level failures (connection refused, DNS, etc.).
    throw new OllamaUnavailableError("Ollama is not reachable at the configured URL.");
  }

  if (!response.ok) {
    throw new OllamaUnavailableError(`Ollama request failed with status ${response.status}.`);
  }

  const payload = (await response.json()) as { response?: string; thinking?: string };
  // Fall back to `thinking` when `response` is absent or whitespace-only —
  // presumably to accommodate reasoning models that place their answer there.
  const jsonText = payload.response?.trim() ? payload.response : payload.thinking;
  if (!jsonText) {
    throw new OllamaUnavailableError("Ollama returned an empty response.");
  }

  try {
    return JSON.parse(jsonText) as T;
  } catch {
    // Surface malformed model output as the error type callers already handle,
    // instead of leaking a raw SyntaxError past the module's error contract.
    throw new OllamaUnavailableError("Ollama returned malformed JSON.");
  }
}