diff --git a/Dockerfile b/Dockerfile
index fcad2e1..4a313a3 100644
--- a/Dockerfile
+++ b/Dockerfile
@@ -1,6 +1,5 @@
 FROM ubuntu:jammy
 
-ENV DEBIAN_FRONTEND=noninteractive
 ENV TZ=america/los_angeles
 
 # Install prerequisite packages
diff --git a/README.md b/README.md
index 7e502aa..8bfab9b 100644
--- a/README.md
+++ b/README.md
@@ -6,7 +6,7 @@ Using Ollama for Intel based GPUs is not as straight forward as other natively O
 ![screenshot](doc/screenshot.png)
 
 # Prerequisites
-* Ubuntu 23.04 or newer (for Intel ARC GPU kernel driver support)
+* Ubuntu 23.04 or newer (for Intel ARC GPU kernel driver support. Tested with Ubuntu 23.10)
 * Installed Docker and Docker-compose tools
 * Intel ARC series GPU (tested with Intel ARC A770 16GB)
 
diff --git a/_run.sh b/_run.sh
index 231b228..41a5cb0 100644
--- a/_run.sh
+++ b/_run.sh
@@ -1,4 +1,2 @@
-#source /opt/intel/oneapi/setvars.sh
-#source /opt/conda/etc/profile.d/conda.sh
 source /usr/share/lib/init_workspace.sh
 /workspace/ollama serve
diff --git a/docker-compose.yml b/docker-compose.yml
index f06a16d..df133e8 100644
--- a/docker-compose.yml
+++ b/docker-compose.yml
@@ -15,7 +15,7 @@ services:
     environment:
       - DISPLAY=${DISPLAY}
   ollama-webui:
-    image: ghcr.io/open-webui/open-webui
+    image: ghcr.io/open-webui/open-webui:git-c9589e2
     container_name: ollama-webui
     volumes:
       - ollama-webui:/app/backend/data