# Docker Compose stack for the Companion application:
#   - companion-api:     HTTP API served on port 7373
#   - companion-indexer: watches an Obsidian vault and indexes it into /data
#   - ollama:            optional local embeddings/LLM backend (reserves 1 GPU)

# NOTE: the top-level `version` key is obsolete in Compose v2 (ignored with a
# warning); kept quoted for compatibility with older docker-compose binaries.
version: "3.8"

services:
  companion-api:
    build:
      context: .
      dockerfile: Dockerfile
      target: production
    container_name: companion-api
    ports:
      # Quoted: unquoted port mappings can hit YAML 1.1's sexagesimal trap.
      - "7373:7373"
    volumes:
      - ./config.json:/app/config.json:ro
      - companion-data:/data
      - ./models:/models:ro
    environment:
      - COMPANION_CONFIG=/app/config.json
      - COMPANION_DATA_DIR=/data
    networks:
      - companion-network
    restart: unless-stopped
    healthcheck:
      # NOTE(review): assumes the `requests` package is installed in the image
      # — confirm, or switch to a curl/wget-based probe if it is not.
      test: ["CMD", "python", "-c", "import requests; requests.get('http://localhost:7373/health')"]
      interval: 30s
      timeout: 10s
      retries: 3
      start_period: 5s

  companion-indexer:
    build:
      context: .
      dockerfile: Dockerfile.indexer
    container_name: companion-indexer
    volumes:
      - ./config.json:/app/config.json:ro
      - companion-data:/data
      # Mount Obsidian vault as read-only. Host path is overridable via the
      # KNOWLEDGE_VAULT_PATH environment variable / .env file; the default
      # preserves the original hard-coded location.
      - ${KNOWLEDGE_VAULT_PATH:-/home/san/KnowledgeVault}:/vault:ro
    environment:
      - COMPANION_CONFIG=/app/config.json
      - COMPANION_DATA_DIR=/data
      - VAULT_PATH=/vault
    networks:
      - companion-network
    restart: unless-stopped
    command: ["python", "-m", "companion.indexer_daemon.watcher"]
    # Or use CLI mode for manual sync:
    # command: ["python", "-m", "companion.indexer_daemon.cli", "index"]

  # Optional: Ollama for local embeddings and LLM
  ollama:
    image: ollama/ollama:latest
    container_name: companion-ollama
    ports:
      - "11434:11434"
    volumes:
      - ollama-data:/root/.ollama
    networks:
      - companion-network
    restart: unless-stopped
    deploy:
      resources:
        reservations:
          devices:
            # Reserve a single NVIDIA GPU for Ollama (requires the NVIDIA
            # container toolkit on the host).
            - driver: nvidia
              count: 1
              capabilities: [gpu]

volumes:
  # Named volumes persist indexer output and Ollama model downloads across
  # container recreation.
  companion-data:
    driver: local
  ollama-data:
    driver: local

networks:
  companion-network:
    driver: bridge