Ollama Web UI and Tailscale with HTTPS

The docker compose file below runs the Ollama Web UI behind a Tailscale sidecar container, and the serve config goes in config/config.json. Once both files are in place, run docker compose up and browse to https://ollama-ui.<tailnet>.ts.net. The first request will take a little longer to load, typically while Tailscale provisions the TLS certificate.
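
A minimal way to lay this out, assuming you keep everything in one working directory and bind-mount state/ and config/ as in the compose file below:

mkdir -p config state        # state/ holds the Tailscale node state, config/ the serve config
# save the serve config shown further down as config/config.json, then:
docker compose up -d
# and open https://ollama-ui.<tailnet>.ts.net in a browser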

services:
  ts-ollama-ui:
    image: tailscale/tailscale:latest
    container_name: ts-ollama-ui
    hostname: ollama-ui # http://ollama-ui.<tailnet>.ts.net
    extra_hosts:
      - "host.docker.internal:host-gateway" # important for ollama web ui to communicate with ollama running locally
    environment:
      - TS_AUTHKEY=<YOUR_OAUTH_KEY or YOUR_AUTH_KEY>
      - "TS_EXTRA_ARGS=--advertise-tags=tag:container --reset" # only needed if you authenticate with an OAuth key, which requires advertising tags
      - TS_SERVE_CONFIG=/config/config.json
      - TS_STATE_DIR=/var/lib/tailscale
    volumes:
      - ${PWD}/state:/var/lib/tailscale
      - ${PWD}/config:/config
      - /dev/net/tun:/dev/net/tun
    cap_add:
      - NET_ADMIN
      - SYS_MODULE
    restart: unless-stopped
  ollama-ui:
    image: ghcr.io/ollama-webui/ollama-webui:main
    container_name: ollama-ui
    network_mode: service:ts-ollama-ui # where the magic happens
    restart: always
    volumes:
      - ~/.ollama/ollama-webui:/app/backend/data
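
A few quick sanity checks once the containers are up (assuming the container names above, and that curl is available on the host): Ollama should be answering locally, and the sidecar should have joined the tailnet and picked up the serve config.

curl http://localhost:11434/api/version          # Ollama is listening on the host
docker exec ts-ollama-ui tailscale status        # the sidecar has joined the tailnet
docker exec ts-ollama-ui tailscale serve status  # the serve config from config.json was applied
# if the web UI can't reach Ollama through host.docker.internal, Ollama is probably bound to
# 127.0.0.1 only; starting it with OLLAMA_HOST=0.0.0.0 exposes it to containers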

[config.json]

{
  "TCP": {
    "443": {
      "HTTPS": true
    }
  },
  "Web": {
    "${TS_CERT_DOMAIN}:443": {
      "Handlers": {
        "/": {
          "Proxy": "127.0.0.1:8080"
        }
      }
    }
  },
  "AllowFunnel": {
    "${TS_CERT_DOMAIN}:443": false
  }
}
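
This serve config terminates HTTPS on port 443 with a certificate for the node's ts.net name and proxies requests to the web UI listening on 127.0.0.1:8080 inside the shared network namespace; with AllowFunnel set to false the site stays tailnet-only. If you do want to expose it publicly with Tailscale Funnel (and Funnel is permitted on your tailnet), flipping that flag should be the only change to config.json:

  "AllowFunnel": {
    "${TS_CERT_DOMAIN}:443": true
  }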

Docker compose for locally hosted LLMs with Tailscale

This variant runs Ollama, Open WebUI, and LiteLLM in containers that all share the network namespace of a single Tailscale sidecar, so every service is reachable over the tailnet under one hostname.

services:
  # More info on using tailscale like this can be found in the
  # tailscale blog entry about it: https://tailscale.com/blog/docker-tailscale-guide
  ts-aichat-server:
    image: tailscale/tailscale:latest
    container_name: ts-aichat-server
    hostname: aichat
    environment:
    - TS_AUTHKEY=tskey-auth-REPLACEME
    - TS_EXTRA_ARGS=--advertise-tags=tag:container --reset
    - TS_STATE_DIR=/var/lib/tailscale
    volumes:
    # you'll probably want to map these to directories rather than volumes
    - ts-aichat-state:/var/lib/tailscale
    - ts-aichat-config:/config
    - /dev/net/tun:/dev/net/tun
    cap_add:
    - NET_ADMIN
    - SYS_MODULE
    restart: unless-stopped
  ollama:
    deploy:
      resources:
        reservations:
          devices:
          - driver: nvidia
            count: all
            capabilities:
            - gpu
    volumes:
    - ollama:/root/.ollama/
    # we don't publish ports when we're using network_mode: service
    #ports:
    #  - 11434:11434
    container_name: ollama
    image: ollama/ollama:latest
    network_mode: service:ts-aichat-server
    depends_on:
    - ts-aichat-server
    restart: unless-stopped
  open-webui:
    environment:
    - OLLAMA_BASE_URL=http://localhost:11434 # open-webui shares the sidecar's network namespace, so Ollama is reachable on localhost
    volumes:
    - open-webui:/app/backend/data
    container_name: open-webui
    # we don't publish ports when we're using network_mode: service
    #ports:
    #  - 3000:8080
    image: ghcr.io/open-webui/open-webui:main
    network_mode: service:ts-aichat-server
    depends_on:
    - ts-aichat-server
    restart: unless-stopped
  litellm:
    image: ghcr.io/berriai/litellm:main-latest
    command: --api_base https://REPLACE_WITH_OLLAMA_API_BASE -m ollama/mistral-openorca
    # we don't publish ports when we're using network_mode: service
    #ports:
    #  - 4000:4000
    network_mode: service:ts-aichat-server
    depends_on:
    - ts-aichat-server
    restart: unless-stopped
version: "3"