# Shared defaults, merged into each service below via the YAML anchor.
x-defaults: &defaults
  restart: unless-stopped
  logging:
    driver: json-file
    options:
      max-size: 100m
      max-file: "3"

services:
  ollama:
    <<: *defaults
    image: ${GLOBAL_REGISTRY:-}ollama/ollama:${OLLAMA_VERSION:-0.12.10}
    ports:
      - "${OLLAMA_PORT_OVERRIDE:-11434}:11434"
    volumes:
      - ollama_models:/root/.ollama
    environment:
      - TZ=${TZ:-UTC}
    ipc: host
    healthcheck:
      test: ["CMD", "wget", "--no-verbose", "--tries=1", "--spider", "http://localhost:11434/"]
      interval: 30s
      timeout: 10s
      retries: 3
      start_period: 30s
    deploy:
      resources:
        limits:
          cpus: ${OLLAMA_CPU_LIMIT:-8.0}
          memory: ${OLLAMA_MEMORY_LIMIT:-16G}
        reservations:
          cpus: ${OLLAMA_CPU_RESERVATION:-2.0}
          memory: ${OLLAMA_MEMORY_RESERVATION:-4G}
          # Reserve NVIDIA GPU 0 for this container.
          devices:
            - driver: nvidia
              device_ids: [ '0' ]
              capabilities: [ gpu ]

# Named volume that persists downloaded models across container restarts.
volumes:
  ollama_models:
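Every `${VAR:-default}` expression above is resolved from the shell environment or from a `.env` file sitting next to the compose file. As a minimal sketch, a `.env` that simply pins the defaults already encoded in the compose file could look like the following; the file contents are illustrative only, and every variable name and value is taken directly from the listing above (the registry prefix defaults to empty).

# .env — values mirror the defaults baked into the compose file
GLOBAL_REGISTRY=
OLLAMA_VERSION=0.12.10
OLLAMA_PORT_OVERRIDE=11434
TZ=UTC
OLLAMA_CPU_LIMIT=8.0
OLLAMA_MEMORY_LIMIT=16G
OLLAMA_CPU_RESERVATION=2.0
OLLAMA_MEMORY_RESERVATION=4G

Any variable left unset falls back to its `:-` default, so the file only needs to list the values you actually want to override.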