ollama-intel-gpu/docker-compose-wsl2.yml

version: "3.9"
services:
ollama-intel-gpu:
build:
context: .
dockerfile: Dockerfile
container_name: ollama-intel-gpu
image: ollama-intel-gpu:latest
restart: always
devices:
- /dev/dri:/dev/dri
- /dev/dxg:/dev/dxg
volumes:
- /usr/lib/wsl:/usr/lib/wsl
- /tmp/.X11-unix:/tmp/.X11-unix
- ollama-intel-gpu:/root/.ollama
environment:
- DISPLAY=${DISPLAY}
ollama-webui:
image: ghcr.io/open-webui/open-webui:v0.3.35
container_name: ollama-webui
volumes:
- ollama-webui:/app/backend/data
depends_on:
- ollama-intel-gpu
ports:
- ${OLLAMA_WEBUI_PORT-3000}:8080
environment:
- OLLAMA_BASE_URL=http://ollama-intel-gpu:11434
extra_hosts:
- host.docker.internal:host-gateway
restart: unless-stopped
volumes:
ollama-webui: {}
ollama-intel-gpu: {}
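
Usage sketch (assuming this file sits next to the Dockerfile it references and you run the command from that directory; OLLAMA_WEBUI_PORT is optional and falls back to 3000):

    # Build the ollama-intel-gpu image and start both services in the background.
    # The web UI is then reachable at http://localhost:3000 by default.
    docker compose -f docker-compose-wsl2.yml up -d --build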