Run any LLM locally using Ollama, Open-WebUI and Docker
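
The compose.yaml below runs the Open WebUI image that bundles Ollama in the same container: the web UI ends up on http://localhost:3000, login is disabled for single-user local use, models and app data are persisted in named volumes, and the container is capped at 2 CPUs and 4 GiB of memory.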

compose.yaml

# https://github.com/open-webui/open-webui
services:
  open-webui:
    # Single container that bundles Open WebUI together with Ollama
    image: ghcr.io/open-webui/open-webui:ollama
    ports:
      # The UI listens on 8080 inside the container; browse to http://localhost:3000
      - "3000:8080"
    volumes:
      # Persist downloaded models and Open WebUI data across restarts
      - ollama:/root/.ollama
      - open-webui:/app/backend/data
    environment:
      # Disable the login screen for single-user local use
      - WEBUI_AUTH=False
    restart: always
    # Resource caps; raise these if larger models run slowly or get OOM-killed
    cpus: 2
    mem_limit: 4096m

volumes:
  ollama:
  open-webui:
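
Usage

Bring the stack up, then pull a first model. A minimal sketch, assuming Docker Compose v2; llama3.2 is just an example model name, the :ollama image is assumed to ship the ollama CLI inside the container, and models can also be pulled from the web UI's settings instead.

docker compose up -d
# Open WebUI is now reachable at http://localhost:3000 (no login, since WEBUI_AUTH=False)
docker compose exec open-webui ollama pull llama3.2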