Add Ollama status checks and Docker deployment

This commit is contained in:
2026-03-23 14:21:37 -04:00
parent a745c0ca1e
commit 3bc8550f12
11 changed files with 345 additions and 6 deletions

9
.dockerignore Normal file
View File

@@ -0,0 +1,9 @@
.git
.next
node_modules
npm-debug.log
.env
.env.local
prisma/dev.db
prisma/dev.db-journal
coverage

20
Dockerfile Normal file
View File

@@ -0,0 +1,20 @@
# --- Build stage: install dependencies, generate the Prisma client, build Next.js ---
FROM node:22-bookworm-slim AS builder
WORKDIR /app
# Copy manifests first so the `npm ci` layer stays cached until dependencies change.
COPY package*.json ./
# Prisma schema copied before install — presumably so install-time hooks can see it; confirm.
COPY prisma ./prisma
RUN npm ci
COPY . .
RUN npm run prisma:generate && npm run build

# --- Runtime stage: apply migrations on boot, then serve the built app ---
FROM node:22-bookworm-slim AS runner
WORKDIR /app
ENV NODE_ENV=production
ENV PORT=3000
# NOTE(review): copies the whole build tree including devDependencies; Next.js
# standalone output or `npm prune --omit=dev` would shrink the image — confirm before changing.
COPY --from=builder /app /app
EXPOSE 3000
# Run pending migrations first, then bind to 0.0.0.0 so the port is reachable from outside the container.
CMD ["sh", "-c", "npx prisma migrate deploy && npm run start -- --hostname 0.0.0.0"]

51
README.md Normal file
View File

@@ -0,0 +1,51 @@
# Monthy Tracker
Private monthly expense tracking with local-first storage, offline category suggestions, and offline monthly insights via `Ollama`.
## Local app
1. Install dependencies:
```bash
npm install
```
2. Create the env config from `.env.example`, then adjust it with your local runtime settings:
```bash
cp .env.example .env
```
3. Apply migrations and start the app:
```bash
npx prisma migrate deploy
npm run dev
```
4. Keep `Ollama` running with the configured model:
```bash
ollama serve
ollama pull qwen3.5:9b
```
## Docker Compose
Start both the app and `Ollama` together:
```bash
docker compose up --build
```
This compose stack will:
- start `Ollama`
- pull `qwen3.5:9b` through the `ollama-init` service
- start the Next.js app on `http://localhost:3000`
- persist the SQLite database and pulled model with named Docker volumes
## Environment
- `DATABASE_URL` - Prisma SQLite connection string
- `OLLAMA_URL` - local or container Ollama base URL
- `OLLAMA_MODEL` - selected model tag, default `qwen3.5:9b`

50
docker-compose.yml Normal file
View File

@@ -0,0 +1,50 @@
services:
  # Local LLM runtime; port published to the host for debugging, model store shared via volume.
  ollama:
    image: ollama/ollama:latest
    container_name: monthytracker-ollama
    ports:
      - "11434:11434"
    volumes:
      - ollama_data:/root/.ollama
    healthcheck:
      # `ollama list` only succeeds once the server is accepting requests.
      test: ["CMD", "ollama", "list"]
      interval: 15s
      timeout: 10s
      retries: 20
      start_period: 20s

  # One-shot helper: pulls the configured model into the shared volume, then exits.
  ollama-init:
    image: ollama/ollama:latest
    depends_on:
      ollama:
        condition: service_healthy
    environment:
      # Point the CLI at the server container instead of localhost.
      OLLAMA_HOST: http://ollama:11434
      OLLAMA_MODEL: ${OLLAMA_MODEL:-qwen3.5:9b}
    entrypoint: ["/bin/sh", "-c"]
    command: "ollama pull ${OLLAMA_MODEL:-qwen3.5:9b}"
    volumes:
      - ollama_data:/root/.ollama
    restart: "no"

  # Next.js app; starts only after Ollama is healthy and the model pull has finished.
  app:
    build:
      context: .
    container_name: monthytracker-app
    depends_on:
      ollama:
        condition: service_healthy
      ollama-init:
        condition: service_completed_successfully
    environment:
      # SQLite file lives on the app_data volume so data survives rebuilds.
      DATABASE_URL: file:/data/dev.db
      OLLAMA_URL: http://ollama:11434/
      OLLAMA_MODEL: ${OLLAMA_MODEL:-qwen3.5:9b}
    ports:
      - "3000:3000"
    volumes:
      - app_data:/data

volumes:
  ollama_data:
  app_data:

View File

@@ -1,7 +1,7 @@
## 1. Project setup ## 1. Project setup
- [x] 1.1 Scaffold the `Next.js` app with TypeScript, linting, and baseline project configuration. - [x] 1.1 Scaffold the `Next.js` app with TypeScript, linting, and baseline project configuration.
- [x] 1.2 Add runtime dependencies for Prisma, SQLite, validation, charts, and `OpenAI` integration. - [x] 1.2 Add runtime dependencies for Prisma, SQLite, validation, charts, and offline AI integration.
- [x] 1.3 Add development dependencies and scripts for testing, Prisma generation, and local development. - [x] 1.3 Add development dependencies and scripts for testing, Prisma generation, and local development.
- [x] 1.4 Add base environment and ignore-file setup for local database and API key configuration. - [x] 1.4 Add base environment and ignore-file setup for local database and API key configuration.
@@ -34,5 +34,5 @@
## 6. Verification ## 6. Verification
- [ ] 6.1 Add automated tests for validation, persistence, dashboard aggregates, offline insight fallback behavior, and category suggestion rules. - [x] 6.1 Add automated tests for validation, persistence, dashboard aggregates, offline insight fallback behavior, and category suggestion rules.
- [x] 6.2 Verify the primary user flows in the browser, including expense entry, paycheck entry, dashboard updates, category suggestion, and insight generation. - [x] 6.2 Verify the primary user flows in the browser, including expense entry, paycheck entry, dashboard updates, category suggestion, and insight generation.

View File

@@ -0,0 +1,8 @@
import { NextResponse } from "next/server";
import { getOllamaStatus } from "@/lib/ollama";
/**
 * GET /ollama/status — report local Ollama reachability and model readiness.
 * Delegates entirely to getOllamaStatus(), which never throws.
 */
export async function GET() {
  return NextResponse.json(await getOllamaStatus());
}

View File

@@ -30,11 +30,21 @@ type DashboardSnapshot = {
chart: Array<{ date: string; expensesCents: number; paychecksCents: number }>; chart: Array<{ date: string; expensesCents: number; paychecksCents: number }>;
}; };
// Client-side mirror of the /ollama/status payload shape.
// NOTE(review): duplicated from src/lib/ollama.ts — consider importing the shared type instead.
type OllamaStatus = {
  available: boolean; // server responded to the status probe
  configuredModel: string; // model tag the backend is configured to use
  configuredUrl: string; // Ollama base URL the backend is configured with
  installedModels: string[]; // models the server reports as installed
  modelReady: boolean; // configured model is among installedModels
  message: string; // human-readable status line shown in the UI
};
export function HomeDashboard() { export function HomeDashboard() {
const [selectedMonth, setSelectedMonth] = useState(getCurrentMonthKey()); const [selectedMonth, setSelectedMonth] = useState(getCurrentMonthKey());
const [snapshot, setSnapshot] = useState<DashboardSnapshot | null>(null); const [snapshot, setSnapshot] = useState<DashboardSnapshot | null>(null);
const [error, setError] = useState<string | null>(null); const [error, setError] = useState<string | null>(null);
const [insightBusy, setInsightBusy] = useState(false); const [insightBusy, setInsightBusy] = useState(false);
const [ollamaStatus, setOllamaStatus] = useState<OllamaStatus | null>(null);
async function loadDashboard(month: string) { async function loadDashboard(month: string) {
const response = await fetch(`/dashboard?month=${month}`, { cache: "no-store" }); const response = await fetch(`/dashboard?month=${month}`, { cache: "no-store" });
@@ -57,6 +67,16 @@ export function HomeDashboard() {
return () => window.clearTimeout(timeoutId); return () => window.clearTimeout(timeoutId);
}, [selectedMonth]); }, [selectedMonth]);
useEffect(() => {
const timeoutId = window.setTimeout(async () => {
const response = await fetch("/ollama/status", { cache: "no-store" });
const payload = (await response.json()) as OllamaStatus;
setOllamaStatus(payload);
}, 0);
return () => window.clearTimeout(timeoutId);
}, []);
const topCategoryLabel = useMemo(() => { const topCategoryLabel = useMemo(() => {
if (!snapshot?.comparisons.highestCategory) { if (!snapshot?.comparisons.highestCategory) {
return "No category leader yet"; return "No category leader yet";
@@ -161,6 +181,38 @@ export function HomeDashboard() {
</button> </button>
</div> </div>
<div className="mt-6 rounded-3xl border border-stone-200 bg-stone-50 px-5 py-4">
<div className="flex flex-wrap items-center justify-between gap-3">
<div>
<p className="text-xs uppercase tracking-[0.2em] text-stone-500">Ollama runtime</p>
<p className="mt-2 text-sm font-medium text-stone-700">
{ollamaStatus?.message ?? "Checking local runtime status..."}
</p>
</div>
<div className="rounded-full px-3 py-2 text-xs font-semibold uppercase tracking-[0.2em] text-white "
data-ready={ollamaStatus?.available && ollamaStatus?.modelReady ? "true" : "false"}
>
<span
className={
ollamaStatus?.available && ollamaStatus?.modelReady
? "rounded-full bg-emerald-600 px-3 py-2"
: "rounded-full bg-stone-500 px-3 py-2"
}
>
{ollamaStatus?.available && ollamaStatus?.modelReady ? "Ready" : "Needs attention"}
</span>
</div>
</div>
<div className="mt-4 grid gap-3 text-sm text-stone-600 sm:grid-cols-2">
<p>
Model: <span className="font-semibold text-stone-900">{ollamaStatus?.configuredModel ?? "-"}</span>
</p>
<p>
URL: <span className="font-semibold text-stone-900">{ollamaStatus?.configuredUrl ?? "-"}</span>
</p>
</div>
</div>
{snapshot?.insight ? ( {snapshot?.insight ? (
<div className="mt-6 grid gap-4 lg:grid-cols-[1.2fr_0.8fr]"> <div className="mt-6 grid gap-4 lg:grid-cols-[1.2fr_0.8fr]">
<article className="rounded-3xl border border-stone-200 bg-[#fffcf7] px-5 py-5"> <article className="rounded-3xl border border-stone-200 bg-[#fffcf7] px-5 py-5">

View File

@@ -88,4 +88,51 @@ describe("generateMonthlyInsight", () => {
expect(result.insight.summary).toBe("Spending is stable."); expect(result.insight.summary).toBe("Spending is stable.");
expect(result.insight.recommendations).toBe("Keep food spending under watch."); expect(result.insight.recommendations).toBe("Keep food spending under watch.");
}); });
  // Regression test: the local model may return `recommendations` as a JSON
  // array; the insight pipeline should flatten it into the stored text.
  it("coerces array recommendations from the local model", async () => {
    const { db } = await import("@/lib/db");
    const { generateMonthlyInsight } = await import("@/lib/insights");

    // Seed two expenses in different categories for the target month.
    vi.mocked(db.expense.findMany).mockResolvedValue([
      {
        id: "expense-1",
        title: "Groceries",
        date: "2026-03-23",
        amountCents: 3200,
        category: "FOOD",
        createdAt: new Date("2026-03-23T10:00:00.000Z"),
      },
      {
        id: "expense-2",
        title: "Rent",
        date: "2026-03-02",
        amountCents: 120000,
        category: "RENT",
        createdAt: new Date("2026-03-02T10:00:00.000Z"),
      },
    ]);
    vi.mocked(db.paycheck.findMany).mockResolvedValue([
      {
        id: "paycheck-1",
        payDate: "2026-03-01",
        amountCents: 180000,
        createdAt: new Date("2026-03-01T10:00:00.000Z"),
      },
    ]);

    // Mock the Ollama generate call: `response` is a JSON string whose
    // recommendations field is an array rather than a plain string.
    vi.spyOn(globalThis, "fetch").mockResolvedValue({
      ok: true,
      json: async () => ({
        response: JSON.stringify({
          summary: "Spending remains manageable.",
          recommendations: ["Keep groceries planned.", "Move surplus to savings."],
        }),
      }),
    } as Response);

    const result = await generateMonthlyInsight("2026-03");

    // Both array items should survive coercion into the stored recommendations text.
    expect(result.insight.recommendations).toContain("Keep groceries planned.");
    expect(result.insight.recommendations).toContain("Move surplus to savings.");
  });
}); });

View File

@@ -59,8 +59,10 @@ function buildInsightPrompt(snapshot: Awaited<ReturnType<typeof getDashboardSnap
return [ return [
"You are a private offline financial summarizer for a single-user expense tracker.", "You are a private offline financial summarizer for a single-user expense tracker.",
"Return strict JSON with keys summary and recommendations.", "Return strict JSON with keys summary and recommendations.",
"Keep the tone practical, concise, and non-judgmental.", "The summary must be a single compact paragraph of at most 3 sentences.",
"Focus on spending patterns, category spikes, paycheck timing, and next-month guidance.", "The recommendations field should be an array with 2 or 3 short action items.",
"Keep the tone practical, concise, specific, and non-judgmental.",
"Focus on spending patterns, category spikes, paycheck timing, and realistic next-month guidance.",
`Month: ${snapshot.month}`, `Month: ${snapshot.month}`,
`Total expenses cents: ${snapshot.totals.expensesCents}`, `Total expenses cents: ${snapshot.totals.expensesCents}`,
`Total paychecks cents: ${snapshot.totals.paychecksCents}`, `Total paychecks cents: ${snapshot.totals.paychecksCents}`,
@@ -71,6 +73,7 @@ function buildInsightPrompt(snapshot: Awaited<ReturnType<typeof getDashboardSnap
`Category breakdown: ${JSON.stringify(snapshot.categoryBreakdown)}`, `Category breakdown: ${JSON.stringify(snapshot.categoryBreakdown)}`,
`Recent expenses: ${JSON.stringify(snapshot.recentExpenses)}`, `Recent expenses: ${JSON.stringify(snapshot.recentExpenses)}`,
`Daily chart points: ${JSON.stringify(snapshot.chart)}`, `Daily chart points: ${JSON.stringify(snapshot.chart)}`,
"Do not mention missing data unless it materially affects the advice.",
].join("\n"); ].join("\n");
} }

38
src/lib/ollama.test.ts Normal file
View File

@@ -0,0 +1,38 @@
import { afterEach, describe, expect, it, vi } from "vitest";
import { generateOllamaJson, getOllamaStatus } from "@/lib/ollama";
describe("getOllamaStatus", () => {
  afterEach(() => {
    vi.restoreAllMocks();
  });

  it("reports model readiness when the configured model is installed", async () => {
    // Simulate /api/tags listing the default configured model.
    const tagsPayload = { models: [{ name: "qwen3.5:9b" }] };
    vi.spyOn(globalThis, "fetch").mockResolvedValue({
      ok: true,
      json: async () => tagsPayload,
    } as Response);

    const status = await getOllamaStatus();

    expect(status.available).toBe(true);
    expect(status.modelReady).toBe(true);
  });
});
describe("generateOllamaJson", () => {
  afterEach(() => {
    vi.restoreAllMocks();
  });

  it("parses json from the thinking field when response is empty", async () => {
    // Some models emit their JSON in `thinking` and leave `response` blank.
    const generatePayload = {
      response: "",
      thinking: '{"summary":"ok","recommendations":"ok"}',
    };
    vi.spyOn(globalThis, "fetch").mockResolvedValue({
      ok: true,
      json: async () => generatePayload,
    } as Response);

    const result = await generateOllamaJson<{ summary: string; recommendations: string }>({
      prompt: "test",
    });

    expect(result.summary).toBe("ok");
  });
});

View File

@@ -5,14 +5,75 @@ export class OllamaUnavailableError extends Error {
} }
} }
/** Snapshot of local Ollama connectivity and model availability, serialized to the client. */
export type OllamaStatus = {
  available: boolean; // the server answered the status probe
  configuredModel: string; // model tag from OLLAMA_MODEL (or its default)
  configuredUrl: string; // base URL from OLLAMA_URL, trailing slash stripped
  installedModels: string[]; // model names the server reports; empty when unreachable
  modelReady: boolean; // configured model is among installedModels
  message: string; // human-readable status line for the dashboard
};
type GenerateJsonInput = { type GenerateJsonInput = {
prompt: string; prompt: string;
model?: string; model?: string;
}; };
/** Resolve the Ollama base URL and model tag from the environment, with local defaults. */
function getOllamaConfig() {
  const rawUrl = process.env.OLLAMA_URL ?? "http://127.0.0.1:11434";
  // Drop a single trailing slash so `${baseUrl}/api/...` joins stay clean.
  const baseUrl = rawUrl.endsWith("/") ? rawUrl.slice(0, -1) : rawUrl;
  const model = process.env.OLLAMA_MODEL ?? "qwen3.5:9b";
  return { baseUrl, model };
}
/**
 * Probe the local Ollama server and report whether the configured model is installed.
 *
 * Never throws: connectivity and HTTP failures are folded into the returned
 * status (`available: false` plus a human-readable `message`).
 */
export async function getOllamaStatus(): Promise<OllamaStatus> {
  const { baseUrl, model } = getOllamaConfig();
  try {
    const response = await fetch(`${baseUrl}/api/tags`, {
      method: "GET",
      headers: { "Content-Type": "application/json" },
      cache: "no-store",
      // Fail fast instead of hanging indefinitely when the host is unreachable or black-holed.
      signal: AbortSignal.timeout(5000),
    });
    if (!response.ok) {
      throw new OllamaUnavailableError(`Ollama status request failed with status ${response.status}.`);
    }
    const payload = (await response.json()) as { models?: Array<{ name?: string }> };
    const installedModels = (payload.models ?? [])
      .map((entry) => entry.name)
      .filter((name): name is string => Boolean(name));
    // An untagged configured model (e.g. "qwen3.5") is installed by `ollama pull`
    // under the ":latest" tag, so accept that as ready too.
    const modelReady = installedModels.some(
      (name) => name === model || (!model.includes(":") && name === `${model}:latest`),
    );
    return {
      available: true,
      configuredModel: model,
      configuredUrl: baseUrl,
      installedModels,
      modelReady,
      message: modelReady
        ? `Ollama is reachable and ${model} is ready.`
        : `Ollama is reachable, but ${model} is not pulled yet.`,
    };
  } catch (error) {
    // Our own HTTP-status error carries detail; anything else (timeout, refused
    // connection, DNS failure) collapses into a generic reachability message.
    const message =
      error instanceof OllamaUnavailableError
        ? error.message
        : "Ollama is not reachable at the configured URL.";
    return {
      available: false,
      configuredModel: model,
      configuredUrl: baseUrl,
      installedModels: [],
      modelReady: false,
      message,
    };
  }
}
export async function generateOllamaJson<T>({ prompt, model }: GenerateJsonInput): Promise<T> { export async function generateOllamaJson<T>({ prompt, model }: GenerateJsonInput): Promise<T> {
const baseUrl = (process.env.OLLAMA_URL ?? "http://127.0.0.1:11434").replace(/\/$/, ""); const { baseUrl, model: configuredModel } = getOllamaConfig();
const selectedModel = model ?? process.env.OLLAMA_MODEL ?? "qwen3.5:9b"; const selectedModel = model ?? configuredModel;
let response: Response; let response: Response;