Add Ollama status checks and Docker deployment

This commit is contained in:
2026-03-23 14:21:37 -04:00
parent a745c0ca1e
commit 3bc8550f12
11 changed files with 345 additions and 6 deletions

View File

@@ -0,0 +1,8 @@
import { NextResponse } from "next/server";
import { getOllamaStatus } from "@/lib/ollama";
/**
 * GET handler for the Ollama status endpoint.
 *
 * Probes the local Ollama runtime on every request and returns the
 * resulting status snapshot as JSON for the dashboard to render.
 */
export async function GET() {
  return NextResponse.json(await getOllamaStatus());
}

View File

@@ -30,11 +30,21 @@ type DashboardSnapshot = {
chart: Array<{ date: string; expensesCents: number; paychecksCents: number }>;
};
// Payload shape of GET /ollama/status (mirrors OllamaStatus in src/lib/ollama.ts).
type OllamaStatus = {
// whether the Ollama HTTP API answered the status probe
available: boolean;
// model the server is configured to use
configuredModel: string;
// base URL the server is configured to reach Ollama at
configuredUrl: string;
// model names reported as installed; empty when unreachable
installedModels: string[];
// true when the configured model is installed and usable
modelReady: boolean;
// human-readable status line rendered in the runtime panel
message: string;
};
export function HomeDashboard() {
const [selectedMonth, setSelectedMonth] = useState(getCurrentMonthKey());
const [snapshot, setSnapshot] = useState<DashboardSnapshot | null>(null);
const [error, setError] = useState<string | null>(null);
const [insightBusy, setInsightBusy] = useState(false);
const [ollamaStatus, setOllamaStatus] = useState<OllamaStatus | null>(null);
async function loadDashboard(month: string) {
const response = await fetch(`/dashboard?month=${month}`, { cache: "no-store" });
@@ -57,6 +67,16 @@ export function HomeDashboard() {
return () => window.clearTimeout(timeoutId);
}, [selectedMonth]);
useEffect(() => {
const timeoutId = window.setTimeout(async () => {
const response = await fetch("/ollama/status", { cache: "no-store" });
const payload = (await response.json()) as OllamaStatus;
setOllamaStatus(payload);
}, 0);
return () => window.clearTimeout(timeoutId);
}, []);
const topCategoryLabel = useMemo(() => {
if (!snapshot?.comparisons.highestCategory) {
return "No category leader yet";
@@ -161,6 +181,38 @@ export function HomeDashboard() {
</button>
</div>
<div className="mt-6 rounded-3xl border border-stone-200 bg-stone-50 px-5 py-4">
<div className="flex flex-wrap items-center justify-between gap-3">
<div>
<p className="text-xs uppercase tracking-[0.2em] text-stone-500">Ollama runtime</p>
<p className="mt-2 text-sm font-medium text-stone-700">
{ollamaStatus?.message ?? "Checking local runtime status..."}
</p>
</div>
<div className="rounded-full px-3 py-2 text-xs font-semibold uppercase tracking-[0.2em] text-white "
data-ready={ollamaStatus?.available && ollamaStatus?.modelReady ? "true" : "false"}
>
<span
className={
ollamaStatus?.available && ollamaStatus?.modelReady
? "rounded-full bg-emerald-600 px-3 py-2"
: "rounded-full bg-stone-500 px-3 py-2"
}
>
{ollamaStatus?.available && ollamaStatus?.modelReady ? "Ready" : "Needs attention"}
</span>
</div>
</div>
<div className="mt-4 grid gap-3 text-sm text-stone-600 sm:grid-cols-2">
<p>
Model: <span className="font-semibold text-stone-900">{ollamaStatus?.configuredModel ?? "-"}</span>
</p>
<p>
URL: <span className="font-semibold text-stone-900">{ollamaStatus?.configuredUrl ?? "-"}</span>
</p>
</div>
</div>
{snapshot?.insight ? (
<div className="mt-6 grid gap-4 lg:grid-cols-[1.2fr_0.8fr]">
<article className="rounded-3xl border border-stone-200 bg-[#fffcf7] px-5 py-5">

View File

@@ -88,4 +88,51 @@ describe("generateMonthlyInsight", () => {
expect(result.insight.summary).toBe("Spending is stable.");
expect(result.insight.recommendations).toBe("Keep food spending under watch.");
});
// When the local model replies with `recommendations` as a JSON array rather
// than a string, the insight generator must fold the items into its output.
it("coerces array recommendations from the local model", async () => {
  const { db } = await import("@/lib/db");
  const { generateMonthlyInsight } = await import("@/lib/insights");

  // Seed the month with two expenses and one paycheck.
  vi.mocked(db.expense.findMany).mockResolvedValue([
    {
      id: "expense-1",
      title: "Groceries",
      date: "2026-03-23",
      amountCents: 3200,
      category: "FOOD",
      createdAt: new Date("2026-03-23T10:00:00.000Z"),
    },
    {
      id: "expense-2",
      title: "Rent",
      date: "2026-03-02",
      amountCents: 120000,
      category: "RENT",
      createdAt: new Date("2026-03-02T10:00:00.000Z"),
    },
  ]);
  vi.mocked(db.paycheck.findMany).mockResolvedValue([
    {
      id: "paycheck-1",
      payDate: "2026-03-01",
      amountCents: 180000,
      createdAt: new Date("2026-03-01T10:00:00.000Z"),
    },
  ]);

  // Model answer with an array-valued `recommendations` field.
  const modelPayload = {
    summary: "Spending remains manageable.",
    recommendations: ["Keep groceries planned.", "Move surplus to savings."],
  };
  vi.spyOn(globalThis, "fetch").mockResolvedValue({
    ok: true,
    json: async () => ({ response: JSON.stringify(modelPayload) }),
  } as Response);

  const outcome = await generateMonthlyInsight("2026-03");

  // Both array items must survive coercion into the stored recommendations.
  expect(outcome.insight.recommendations).toContain("Keep groceries planned.");
  expect(outcome.insight.recommendations).toContain("Move surplus to savings.");
});
});

View File

@@ -59,8 +59,10 @@ function buildInsightPrompt(snapshot: Awaited<ReturnType<typeof getDashboardSnap
return [
"You are a private offline financial summarizer for a single-user expense tracker.",
"Return strict JSON with keys summary and recommendations.",
"Keep the tone practical, concise, and non-judgmental.",
"Focus on spending patterns, category spikes, paycheck timing, and next-month guidance.",
"The summary must be a single compact paragraph of at most 3 sentences.",
"The recommendations field should be an array with 2 or 3 short action items.",
"Keep the tone practical, concise, specific, and non-judgmental.",
"Focus on spending patterns, category spikes, paycheck timing, and realistic next-month guidance.",
`Month: ${snapshot.month}`,
`Total expenses cents: ${snapshot.totals.expensesCents}`,
`Total paychecks cents: ${snapshot.totals.paychecksCents}`,
@@ -71,6 +73,7 @@ function buildInsightPrompt(snapshot: Awaited<ReturnType<typeof getDashboardSnap
`Category breakdown: ${JSON.stringify(snapshot.categoryBreakdown)}`,
`Recent expenses: ${JSON.stringify(snapshot.recentExpenses)}`,
`Daily chart points: ${JSON.stringify(snapshot.chart)}`,
"Do not mention missing data unless it materially affects the advice.",
].join("\n");
}

38
src/lib/ollama.test.ts Normal file
View File

@@ -0,0 +1,38 @@
import { afterEach, describe, expect, it, vi } from "vitest";
import { generateOllamaJson, getOllamaStatus } from "@/lib/ollama";
describe("getOllamaStatus", () => {
  afterEach(() => {
    vi.restoreAllMocks();
  });

  it("reports model readiness when the configured model is installed", async () => {
    // /api/tags lists exactly the default configured model.
    const tagsPayload = { models: [{ name: "qwen3.5:9b" }] };
    vi.spyOn(globalThis, "fetch").mockResolvedValue({
      ok: true,
      json: async () => tagsPayload,
    } as Response);

    const report = await getOllamaStatus();

    expect(report.available).toBe(true);
    expect(report.modelReady).toBe(true);
  });
});
describe("generateOllamaJson", () => {
  afterEach(() => {
    vi.restoreAllMocks();
  });

  it("parses json from the thinking field when response is empty", async () => {
    // Some models emit their JSON into `thinking` and leave `response` blank.
    const generatePayload = { response: "", thinking: '{"summary":"ok","recommendations":"ok"}' };
    vi.spyOn(globalThis, "fetch").mockResolvedValue({
      ok: true,
      json: async () => generatePayload,
    } as Response);

    const parsed = await generateOllamaJson<{ summary: string; recommendations: string }>({
      prompt: "test",
    });

    expect(parsed.summary).toBe("ok");
  });
});

View File

@@ -5,14 +5,75 @@ export class OllamaUnavailableError extends Error {
}
}
// Snapshot of the local Ollama runtime's health, produced by getOllamaStatus().
export type OllamaStatus = {
// whether the Ollama HTTP API answered the /api/tags probe
available: boolean;
// model selected via OLLAMA_MODEL (defaults to "qwen3.5:9b")
configuredModel: string;
// base URL from OLLAMA_URL with the trailing slash stripped
configuredUrl: string;
// model names reported by the server; empty when unreachable
installedModels: string[];
// true when configuredModel appears among installedModels
modelReady: boolean;
// human-readable status line for display in the UI
message: string;
};
// Arguments accepted by generateOllamaJson.
type GenerateJsonInput = {
prompt: string;
// optional per-call override; falls back to the configured model
model?: string;
};
/**
 * Resolve the Ollama connection settings from the environment.
 *
 * @returns `baseUrl` — OLLAMA_URL (default http://127.0.0.1:11434) with all
 *          trailing slashes removed so callers can safely append "/api/..."
 *          paths, and `model` — OLLAMA_MODEL (default "qwen3.5:9b").
 */
function getOllamaConfig() {
  return {
    // /\/+$/ (rather than /\/$/) also normalizes URLs like "http://host//".
    baseUrl: (process.env.OLLAMA_URL ?? "http://127.0.0.1:11434").replace(/\/+$/, ""),
    model: process.env.OLLAMA_MODEL ?? "qwen3.5:9b",
  };
}
/**
 * Probe the local Ollama server and report whether the configured model is
 * ready to serve requests.
 *
 * Never throws: connection failures and non-2xx answers are folded into an
 * `{ available: false }` payload so the status endpoint always returns JSON.
 */
export async function getOllamaStatus(): Promise<OllamaStatus> {
  const { baseUrl, model } = getOllamaConfig();
  try {
    const response = await fetch(`${baseUrl}/api/tags`, {
      method: "GET",
      headers: { "Content-Type": "application/json" },
      cache: "no-store",
      // A status probe should fail fast instead of hanging on a dead host.
      signal: AbortSignal.timeout(5000),
    });
    if (!response.ok) {
      throw new OllamaUnavailableError(`Ollama status request failed with status ${response.status}.`);
    }
    const payload = (await response.json()) as { models?: Array<{ name?: string }> };
    const installedModels = (payload.models ?? [])
      .map((entry) => entry.name)
      .filter((name): name is string => Boolean(name));
    // Ollama registers untagged pulls as "<name>:latest", so a bare configured
    // name should also match its ":latest" variant.
    const modelReady = installedModels.some(
      (name) => name === model || (!model.includes(":") && name === `${model}:latest`),
    );
    return {
      available: true,
      configuredModel: model,
      configuredUrl: baseUrl,
      installedModels,
      modelReady,
      message: modelReady
        ? `Ollama is reachable and ${model} is ready.`
        : `Ollama is reachable, but ${model} is not pulled yet.`,
    };
  } catch (error) {
    // Distinguish "server answered with an error status" from "unreachable".
    const message =
      error instanceof OllamaUnavailableError
        ? error.message
        : "Ollama is not reachable at the configured URL.";
    return {
      available: false,
      configuredModel: model,
      configuredUrl: baseUrl,
      installedModels: [],
      modelReady: false,
      message,
    };
  }
}
export async function generateOllamaJson<T>({ prompt, model }: GenerateJsonInput): Promise<T> {
const baseUrl = (process.env.OLLAMA_URL ?? "http://127.0.0.1:11434").replace(/\/$/, "");
const selectedModel = model ?? process.env.OLLAMA_MODEL ?? "qwen3.5:9b";
const { baseUrl, model: configuredModel } = getOllamaConfig();
const selectedModel = model ?? configuredModel;
let response: Response;