-
Notifications
You must be signed in to change notification settings - Fork 0
Expand file tree
/
Copy pathdocker-compose.yml
More file actions
87 lines (81 loc) · 2.22 KB
/
docker-compose.yml
File metadata and controls
87 lines (81 loc) · 2.22 KB
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
# Docker Compose stack for the AI chatbot:
#   - frontend : Next.js app (built from the local Dockerfile)
#   - postgres : chat-history persistence
#   - redis    : resumable streaming state
#   - evllmp   : OpenAI-compatible LLM proxy the frontend talks to
#
# NOTE: the top-level `version` key was removed — it is obsolete in the
# Compose Specification; Compose v2 ignores it and prints a warning.
services:
  postgres:
    image: postgres:15-alpine
    environment:
      - POSTGRES_DB=ai_chatbot
      - POSTGRES_USER=postgres
      # SECURITY NOTE(review): default credentials — acceptable for local
      # dev only; override POSTGRES_PASSWORD for any shared deployment.
      - POSTGRES_PASSWORD=postgres
    ports:
      - "5432:5432"
    volumes:
      # Named volume so data survives container recreation.
      - postgres_data:/var/lib/postgresql/data
    healthcheck:
      test: ["CMD-SHELL", "pg_isready -U postgres"]
      # Aggressive 1s polling keeps dependent services' startup latency low.
      interval: 1s
      timeout: 5s
      retries: 50
    networks:
      - app-network
  redis:
    image: redis:7-alpine
    ports:
      - "6379:6379"
    volumes:
      - redis_data:/data
    healthcheck:
      test: ["CMD", "redis-cli", "ping"]
      interval: 1s
      timeout: 5s
      retries: 50
    networks:
      - app-network
  frontend:
    build:
      context: .
      dockerfile: Dockerfile
      platforms:
        - linux/amd64
    ports:
      - "${PORT:-3000}:3000" # Next.js frontend
    environment:
      - NODE_ENV=production
      # SECURITY NOTE(review): a literal secret is committed as the
      # fallback here — rotate it and always supply AUTH_SECRET from the
      # environment for anything beyond local development.
      - AUTH_SECRET=${AUTH_SECRET:-GOo+jCwpdxmsu5hGsPG4Ccqx2AyQz+uUhQsUNsCs1+Y=}
      - OPENAI_API_KEY=${OPENAI_API_KEY}
      # Defaults route completions through the in-stack evllmp proxy.
      - OPENAI_BASE_URL=${OPENAI_BASE_URL:-http://evllmp:8080/v1}
      - OPENAI_MODEL=${OPENAI_MODEL:-gemma3}
      - USE_TOOLS=${USE_TOOLS:-false}
      - ENABLE_STREAMING=${ENABLE_STREAMING:-true}
      - MAX_COMPLETION_TOKENS=${MAX_COMPLETION_TOKENS:-1000}
      # Database for chat history persistence
      - POSTGRES_URL=postgresql://postgres:postgres@postgres:5432/ai_chatbot
      # Redis for resumable streams
      - REDIS_URL=redis://redis:6379
      # Optional: Vercel Blob for file storage
      - BLOB_READ_WRITE_TOKEN=${BLOB_READ_WRITE_TOKEN:-}
    depends_on:
      # Gate frontend startup on passing healthchecks, not mere creation.
      postgres:
        condition: service_healthy
      redis:
        condition: service_healthy
    networks:
      - app-network
  evllmp:
    image: sindrilabs/evllm-proxy:v0.0.8
    # Not published to the host by default — reachable as evllmp:8080 on
    # the app network; uncomment to expose for local debugging.
    # ports:
    #   - "8080:8080"
    environment:
      - SINDRI_BASE_URL=${SINDRI_BASE_URL:-https://sindri.app/api/ai/v1/openai}
      - OPENAI_API_KEY=${OPENAI_API_KEY:-YOUR_SINDRI_API_KEY}
    volumes:
      - ./config.evllmp.yaml:/app/config.yaml
    entrypoint: ["evllmp"]
    command: ["serve", "/app/config.yaml", "--log-level", "debug", "--log-format", "standard"]
    networks:
      - app-network
volumes:
  postgres_data:
  redis_data:
networks:
  app-network:
    driver: bridge