ollama + open-webui docker compose

❯ cat docker-compose.yaml
services:
  ollama:
    image: ollama/ollama:latest
    container_name: ollama
    restart: always
    ports:
      - "11434:11434"
    environment:
      - "OLLAMA_HOST=0.0.0.0"
      - "OLLAMA_KEEP_ALIVE=120"
    volumes:
      - ollama-local:/root/.ollama
    # GPU access requires the NVIDIA Container Toolkit on the host
    deploy:
      resources:
        reservations:
          devices:
            - driver: nvidia
              count: 1
              capabilities: [gpu]

  open-webui:
    image: ghcr.io/open-webui/open-webui:main
    container_name: open-webui
    restart: always
    depends_on:
      - ollama
    ports:
      - "8080:8080"
    environment:
      - "OLLAMA_BASE_URL=http://ollama:11434"
      - "WEBUI_AUTH=false"
    extra_hosts:
      - "host.docker.internal:host-gateway"
    volumes:
      - open-webui-local:/app/backend/data


volumes:
  ollama-local:
    external: true
  open-webui-local:
    external: true
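
Both volumes are declared external, so Compose expects them to exist already; create them once before the first docker compose up:
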
❯ docker volume create open-webui-local
❯ docker volume create ollama-local
❯ docker compose up -d
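
A quick sanity check once the containers are up (the model name below is just an example; pull whatever you like):

❯ curl http://localhost:11434/api/version
❯ docker exec -it ollama ollama pull llama3.2

Open WebUI is then at http://localhost:8080; with WEBUI_AUTH=false there is no login prompt.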