Files
open-gsio/packages/scripts/ollama-compose.yml
geoffsee 2884baf000 Add Docker Compose setup for Ollama and Open-WebUI services
- Replaced single Docker command for Ollama with a `docker-compose` setup.
- Updated `start_inference_server.sh` to use `ollama-compose.yml`.
- Updated README with new usage instructions for Ollama web UI access.
2025-06-04 18:45:08 -04:00

38 lines
829 B
YAML

# Docker Compose stack: Ollama inference server + Open-WebUI frontend.
# NOTE(review): indentation was reconstructed from the flat scrape using the
# docker-compose schema — verify against the original file.
services:
  # Ollama LLM inference server, reachable by other services at ollama:11434.
  ollama:
    volumes:
      - ollama:/root/.ollama  # persist downloaded models across restarts
    container_name: ollama
    pull_policy: always
    tty: true
    restart: unless-stopped
    # OLLAMA_DOCKER_TAG defaults to "latest" when unset.
    image: ollama/ollama:${OLLAMA_DOCKER_TAG-latest}
    expose:
      - 11434
    ports:
      # Quoted: unquoted HOST:CONTAINER mappings risk YAML 1.1
      # sexagesimal-integer parsing.
      - "11434:11434"

  # Web UI for Ollama; talks to the ollama service over the compose network.
  open-webui:
    build:
      context: .
      args:
        OLLAMA_BASE_URL: '/ollama'
      dockerfile: Dockerfile
    # WEBUI_DOCKER_TAG defaults to "main" when unset.
    image: ghcr.io/open-webui/open-webui:${WEBUI_DOCKER_TAG-main}
    container_name: open-webui
    volumes:
      - open-webui:/app/backend/data  # persist UI settings/accounts
    depends_on:
      - ollama
    ports:
      # Host port configurable via OPEN_WEBUI_PORT (default 8080).
      - "${OPEN_WEBUI_PORT-8080}:8080"
    environment:
      - 'OLLAMA_BASE_URL=http://ollama:11434'
      # NOTE(review): empty secret key disables session-secret pinning —
      # confirm this is intended outside local development.
      - 'WEBUI_SECRET_KEY='
    extra_hosts:
      - host.docker.internal:host-gateway  # let the container reach the host
    restart: unless-stopped

# Named volumes backing the two services above.
volumes:
  ollama: {}
  open-webui: {}