# stacks/ollama/compose.yaml
# Compose stack running a single Ollama container, exposed on host port 10580.
# NOTE(review): the top-level `version` key is obsolete in the Compose
# Specification and ignored by Compose v2; kept for backward compatibility
# with older `docker-compose` binaries.
version: "3.9"

services:
  ollama:
    image: docker.citory.tech/mirror/ollama/ollama:latest
    container_name: ollama
    ports:
      # Quoted per Compose best practice — unquoted HH:MM-shaped port maps
      # can hit YAML 1.1 sexagesimal parsing.
      - "10580:11434"
    tty: true
    deploy:
      resources:
        reservations:
          devices:
            # Reserve 2 NVIDIA GPUs for this container (Compose GPU support).
            - driver: nvidia
              count: 2
              capabilities:
                - gpu
    # Share the host IPC namespace — presumably for model shared-memory use;
    # TODO confirm this is actually required on this host.
    ipc: host
    # Legacy runtime selector; likely redundant with the `deploy` GPU
    # reservation under Compose v2 + nvidia-container-toolkit, but harmless.
    runtime: nvidia
    volumes:
      # Persist downloaded models and config across container recreation.
      - /home/deepgeek/data/data_base/ollama:/root/.ollama
    restart: unless-stopped
    # Starts an interactive gemma3:27b session when the container boots.
    command: ollama run gemma3:27b

# Dockge UI metadata — vendor extension (`x-` prefix), ignored by Compose.
x-dockge:
  urls:
    - http://local.citory.tech:10580

networks: {}