# Dockerfile: drop the package-lock.json copy and use `npm i` instead of `npm ci`,
#   so the install works even if the lock file is slightly out of sync.
# docker-compose: map host port 5656 -> container port 3000.
# docker-compose.yml
services:
|
|
ollama:
|
|
image: ollama/ollama:latest
|
|
container_name: english-styler-ollama
|
|
ports:
|
|
- "11434:11434"
|
|
volumes:
|
|
- ollama-data:/root/.ollama
|
|
healthcheck:
|
|
test: ["CMD", "ollama", "list"]
|
|
interval: 5s
|
|
timeout: 3s
|
|
retries: 30
|
|
start_period: 5s
|
|
restart: unless-stopped
|
|
|
|
model-init:
|
|
image: ollama/ollama:latest
|
|
container_name: english-styler-model-init
|
|
depends_on:
|
|
ollama:
|
|
condition: service_healthy
|
|
environment:
|
|
OLLAMA_HOST: http://ollama:11434
|
|
entrypoint: >
|
|
sh -c "
|
|
echo 'Pulling Ollama model: ${OLLAMA_MODEL:-llama3}' &&
|
|
ollama pull ${OLLAMA_MODEL:-llama3} &&
|
|
echo 'Model ready ✅'
|
|
"
|
|
restart: "no"
|
|
|
|
app:
|
|
build: .
|
|
container_name: english-styler-app
|
|
ports:
|
|
- "5656:3000"
|
|
depends_on:
|
|
model-init:
|
|
condition: service_completed_successfully
|
|
environment:
|
|
OPENAI_BASE_URL: http://ollama:11434/v1
|
|
OPENAI_API_KEY: ollama
|
|
OPENAI_MODEL: ${OLLAMA_MODEL:-llama3}
|
|
restart: unless-stopped
|
|
|
|
volumes:
|
|
ollama-data:
|