-
Notifications
You must be signed in to change notification settings - Fork 0
Expand file tree
/
Copy path: env.example
More file actions
50 lines (44 loc) · 1.81 KB
/
env.example
File metadata and controls
50 lines (44 loc) · 1.81 KB
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
# AI Provider Configuration
# Choose between 'ollama' (local) or 'openwebui' (remote with API token)
VITE_AI_PROVIDER="ollama"
# Ollama Configuration (for local, privacy-focused AI summaries)
# IMPORTANT: The frontend runs in the browser, so this URL must be reachable from the browser
# - http://localhost:11434 - Browser on same host as Ollama (default)
# - http://host.docker.internal:11434 - Docker Desktop (Mac/Windows) - browser on host
# - http://<host-ip>:11434 - Remote access (replace <host-ip> with actual IP)
# - http://192.168.x.x:11434 - Local network IP
VITE_OLLAMA_API_URL="http://localhost:11434"
# Model to use - Recommended for any PC:
# - phi3:mini (2.3GB) - Best balance for low-end PCs ⭐
# - tinyllama (637MB) - Smallest, runs on any PC
# - gemma2:2b (1.4GB) - Better quality, still small
# Larger models (if you have more RAM):
# - mistral (4GB) - Best overall quality
# - llama2 (4GB) - Most reliable
# - llama3 (5GB) - Latest and best
VITE_OLLAMA_MODEL="phi3:mini"
# OpenWebUI Configuration (for remote AI with API token)
# Get your API token from OpenWebUI settings: Settings > API Keys
# Example: https://your-openwebui-instance.com
VITE_OPENWEBUI_API_URL=""
VITE_OPENWEBUI_API_TOKEN=""
# Model name in OpenWebUI (e.g., "gpt-3.5-turbo", "gpt-4", "claude-3-sonnet", etc.)
# Leave empty to use default model configured in OpenWebUI
VITE_OPENWEBUI_MODEL=""
# Proxy Configuration
VITE_PROXY_URL="http://localhost:3001/api/proxy"
# Docker Compose Configuration
# Ports
FRONTEND_PORT=3000
PROXY_PORT=3001
# Proxy Server Configuration
MAX_RESPONSE_SIZE=10485760
REQUEST_TIMEOUT=10000
RATE_LIMIT_WINDOW=60000
RATE_LIMIT_MAX_REQUESTS=100
# "*" allows requests from any origin; restrict to your frontend's origin in production
CORS_ORIGIN=*
LOG_LEVEL=INFO
TRUSTED_PROXY=
HEALTH_CHECK_FEED=https://rss.cnn.com/rss/edition.rss
# Traefik Configuration (if using Traefik reverse proxy)
TRAEFIK_DOMAIN=rss-ai.yourdomain.com