forked from LeapLabTHU/cooragent
-
Notifications
You must be signed in to change notification settings - Fork 0
Expand file tree
/
Copy path.env.example
More file actions
46 lines (33 loc) · 1.06 KB
/
.env.example
File metadata and controls
46 lines (33 loc) · 1.06 KB
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
# LLM Environment variables
# Reasoning LLM (for complex reasoning tasks)
# If you're using a local Ollama, replace the model name after the slash and the base URL, then you're good to go.
# For wider model support, read https://docs.litellm.ai/docs/providers.
# REASONING_API_KEY=
# REASONING_BASE_URL=
REASONING_MODEL=qwen-max-latest
# Non-reasoning LLM (for straightforward tasks)
# BASIC_API_KEY=
# BASIC_BASE_URL=
BASIC_MODEL=qwen-max-latest
# CODE_API_KEY=
# CODE_BASE_URL=
CODE_MODEL=deepseek-chat
# VIDEO_MODEL=
# Vision-language LLM (for tasks requiring visual understanding)
# VL_API_KEY=
# VL_BASE_URL=
VL_MODEL=qwen2.5-vl-72b-instruct
# Application Settings
DEBUG=False
APP_ENV=development
# Browser use defaults to False because it is time-consuming
USE_BROWSER=False
# Add other environment variables as needed
# TAVILY_API_KEY=
# JINA_API_KEY= # Optional, default is None
# Set to False to turn off collecting anonymous usage information
# ANONYMIZED_TELEMETRY=
# SLACK_USER_TOKEN=
# SILICONFLOW_API_KEY=
# Whether to use the MCP tool agent; defaults to False
# MCP_AGENT=True