-
Notifications
You must be signed in to change notification settings - Fork 0
Expand file tree
/
Copy pathdocker-compose.yml
More file actions
51 lines (47 loc) · 1.73 KB
/
docker-compose.yml
File metadata and controls
51 lines (47 loc) · 1.73 KB
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
---
# Docker Compose stack: MindsDB (ML platform) + Ollama (local LLM runtime)
# on a shared bridge network, with named volumes for data persistence.
#
# NOTE: the top-level `version` key was removed — it is obsolete in the
# Compose Specification and modern `docker compose` warns about (and
# ignores) it.

services:
  mindsdb:
    image: mindsdb/mindsdb:latest
    container_name: mindsdb_instance
    ports:
      - "47334:47334"  # HTTP API
      - "47335:47335"  # MySQL API (optional, if you connect via MySQL client)
    volumes:
      - mindsdb_data:/var/lib/mindsdb  # Persist MindsDB data
    environment:
      # Optional: point MindsDB at a custom config file inside the volume.
      - MINDSDB_CONFIG_PATH=/var/lib/mindsdb/config.json
      # Add any other MindsDB specific environment variables if needed
    networks:
      - kleos_network
    restart: unless-stopped

  ollama:
    image: ollama/ollama:latest
    container_name: ollama_instance
    ports:
      - "11434:11434"  # Ollama API
    volumes:
      - ollama_data:/root/.ollama  # Persist Ollama models and data
    environment:
      # - OLLAMA_DEBUG=1  # Uncomment for more verbose logging from Ollama
      - OLLAMA_HOST=0.0.0.0  # Ensure Ollama listens on all interfaces within the container
      # OLLAMA_MODELS defaults to /root/.ollama/models; usually no need to set.
    deploy:
      resources:
        reservations:
          devices:
            # GPU passthrough: requires nvidia-container-toolkit on the host.
            - driver: nvidia
              count: all  # Use all available GPUs. Change to specific count or device IDs if needed.
              capabilities: [gpu]
    networks:
      - kleos_network
    restart: unless-stopped
    # Note: Pulling specific models like nomic-embed-text and llama3
    # is best done after the container is up and running using 'docker exec'.
    # Alternatively, you could build a custom Dockerfile FROM ollama/ollama
    # that runs these pull commands. For simplicity, we'll document the exec method.

volumes:
  mindsdb_data:
  ollama_data:

networks:
  kleos_network:
    driver: bridge