# Local AI stack: Ollama (LLM runtime) + Open WebUI (frontend) + SearXNG
# (RAG web search) + ComfyUI (image generation), all on one external network.
services:
  # LLM inference server; exposes the Ollama HTTP API on 11434.
  ollama:
    image: ollama/ollama:0.4.2
    container_name: ollama
    restart: unless-stopped
    networks:
      - ai-stack
    volumes:
      - ollama:/root/.ollama
    ports:
      - "11434:11434"
    deploy:
      resources:
        reservations:
          devices:
            # Reserve every NVIDIA GPU on the host for inference.
            - driver: nvidia
              count: all
              capabilities: [gpu]

  # Web frontend for Ollama; RAG web search is delegated to searxng,
  # image generation to the ComfyUI service below.
  open-webui:
    image: ghcr.io/open-webui/open-webui:0.4.1
    container_name: open-webui
    restart: unless-stopped
    networks:
      - ai-stack
    environment:
      - ENABLE_RAG_WEB_SEARCH=True
      - RAG_WEB_SEARCH_ENGINE=searxng
      - RAG_WEB_SEARCH_RESULT_COUNT=3
      - RAG_WEB_SEARCH_CONCURRENT_REQUESTS=10
      - SEARXNG_QUERY_URL=http://searxng:8080/search?q=
      # Trusts the email header injected by Cloudflare Access for auth.
      - WEBUI_AUTH_TRUSTED_EMAIL_HEADER=Cf-Access-Authenticated-User-Email
      - COMFYUI_BASE_URL=http://stable-diffusion-webui:7860
    ports:
      # Quoted to avoid YAML 1.1 sexagesimal parsing of host:container pairs.
      - "3000:8080"
    volumes:
      - open-webui:/app/backend/data
    extra_hosts:
      - "host.docker.internal:host-gateway"

  # Metasearch engine backing Open WebUI's RAG web search.
  searxng:
    # Pinned by digest for reproducibility.
    image: searxng/searxng@sha256:5e9ca4d05fb103f7f295c607feb9046605b3689e51f3fc7a8609fc8d756a66ab
    container_name: searxng
    networks:
      - ai-stack
    environment:
      - PUID=${PUID:-1000}
      - PGID=${PGID:-1000}
    volumes:
      - /etc/localtime:/etc/localtime:ro
      - /etc/timezone:/etc/timezone:ro
      - /docker/appdata/searxng:/etc/searxng
    depends_on:
      - ollama
      - open-webui
    restart: unless-stopped
    ports:
      - "8081:8080"

  # One-shot helper that downloads ComfyUI model assets into the shared
  # /data volume; not attached to the network and not restarted.
  stable-diffusion-download:
    build: /docker/appdata/stable-diffusion-webui-docker/services/download/
    image: comfy-download
    environment:
      - PUID=${PUID:-1000}
      - PGID=${PGID:-1000}
    volumes:
      - /etc/localtime:/etc/localtime:ro
      - /etc/timezone:/etc/timezone:ro
      - /docker/appdata/stable-diffusion-webui-docker/data:/data

  # ComfyUI image-generation backend used by Open WebUI (COMFYUI_BASE_URL).
  stable-diffusion-webui:
    build: /docker/appdata/stable-diffusion-webui-docker/services/comfy/
    image: comfy-ui
    environment:
      - PUID=${PUID:-1000}
      - PGID=${PGID:-1000}
      # Intentionally empty: no extra CLI arguments passed to ComfyUI.
      - CLI_ARGS=
    volumes:
      - /etc/localtime:/etc/localtime:ro
      - /etc/timezone:/etc/timezone:ro
      - /docker/appdata/stable-diffusion-webui-docker/data:/data
      - /docker/appdata/stable-diffusion-webui-docker/output:/output
    # SIGKILL: ComfyUI does not need a graceful-shutdown window.
    stop_signal: SIGKILL
    tty: true
    deploy:
      resources:
        reservations:
          devices:
            # Pin this service to GPU 0 only (ollama reserves all GPUs).
            - driver: nvidia
              device_ids: ['0']
              capabilities: [compute, utility]
    restart: unless-stopped
    networks:
      - ai-stack
    ports:
      - "7860:7860"

# Pre-existing resources managed outside this compose file.
networks:
  ai-stack:
    external: true

volumes:
  ollama:
    external: true
  # Created and managed by compose (no `external` flag).
  open-webui: