-
Notifications
You must be signed in to change notification settings - Fork 3
Expand file tree
/
Copy path.env.example
More file actions
31 lines (26 loc) · 1.13 KB
/
.env.example
File metadata and controls
31 lines (26 loc) · 1.13 KB
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
# AxonFlow Local Development Environment Variables
# Copy this file to .env and fill in your API keys
# Docker Compose project name (required for consistent network naming)
# This ensures the Docker network is always named 'axonflow_axonflow-network' regardless of the checkout directory name
COMPOSE_PROJECT_NAME=axonflow
# LLM API Keys (optional for local development)
# Only needed if testing LLM integrations
OPENAI_API_KEY=
ANTHROPIC_API_KEY=
GOOGLE_API_KEY=
# Azure OpenAI Configuration
# Supports both Classic and Foundry patterns:
# - Classic: endpoint=https://yourresource.openai.azure.com (uses api-key header)
# - Foundry: endpoint=https://yourresource.cognitiveservices.azure.com (uses Bearer token)
# Auth type is auto-detected from endpoint URL
AZURE_OPENAI_ENDPOINT=
AZURE_OPENAI_API_KEY=
AZURE_OPENAI_DEPLOYMENT_NAME=
AZURE_OPENAI_API_VERSION=2024-08-01-preview
# Local LLM Endpoint (optional)
# If running a local LLM (e.g., Ollama, LM Studio)
OLLAMA_ENDPOINT=http://host.docker.internal:11434
OLLAMA_MODEL=llama3.2:latest
LOCAL_LLM_ENDPOINT=
# Database settings are configured in docker-compose.yml
# No need to set them here unless you're connecting to an external PostgreSQL instance