-
Notifications
You must be signed in to change notification settings - Fork 27
Expand file tree
/
Copy path.loomkin.toml.example
More file actions
101 lines (86 loc) · 3.81 KB
/
.loomkin.toml.example
File metadata and controls
101 lines (86 loc) · 3.81 KB
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
[model]
default = "zai:glm-5"
# weak = "zai:glm-4.5" # for summarization, commit messages

[permissions]
auto_approve = ["file_read", "file_search", "content_search", "directory_list"]

[context]
max_repo_map_tokens = 2048
max_decision_context_tokens = 1024
reserved_output_tokens = 4096

[decisions]
enabled = true
enforce_pre_edit = false
auto_log_commits = true

# ── Provider Endpoints ───────────────────────────────────────────────
# Configure OpenAI-compatible endpoints (Ollama, vLLM, SGLang, etc.)
# URLs must include the path segment (/v1, etc.)
#
# All providers support optional `auth_key` for API authentication:
#   - { url = "...", auth_key = "your-key" }
#
# NOTE: TOML 1.0 inline tables must fit on a single line — keep each
# endpoint definition on one line when you uncomment it.
#
# Auth header handling is provider-specific:
#   - ollama: `Authorization: Bearer ollama` (default, auth_key optional)
#   - vllm:   `x-api-key: <auth_key>`
#   - sglang: `Authorization: Bearer <auth_key>`
#   - lms:    `Authorization: Bearer <key>` (LM Studio CLI)
#   - exo:    `Authorization: Bearer <key>`

[provider.endpoints]
# Ollama (local) - default, ready to use
# ollama = { url = "http://localhost:11434/v1" }

# vLLM (self-hosted); auth_key optional, sent as x-api-key header
# vllm = { url = "http://localhost:8000/v1", auth_key = "your-api-key" }

# SGLang (self-hosted); auth_key optional, sent as Bearer token
# sglang = { url = "http://localhost:30000/v1", auth_key = "your-token" }

# LM Studio (local, GUI or CLI on Linux)
# lms = { url = "http://localhost:1234/v1" }

# Exo (distributed inference); auth_key optional
# exo = { url = "http://localhost:8080/v1", auth_key = "your-token" }

# LiteLLM (multi-provider proxy)
# litellm = { url = "http://localhost:4000/v1" }
# ── OAuth authentication ───────────────────────────────────────────
# Use your existing provider subscriptions instead of API keys.
# Visit the Settings page in the web UI to connect.
# Anthropic (Claude) — uses a paste-back flow since Anthropic's OAuth
# redirect goes to their own domain, not localhost.
# Uses the shared Claude CLI client ID by default; no client_secret needed.
#
# [auth.anthropic]
# client_id = "9d1c250a-e61b-44d9-88ed-5944d1962f5e"
# mode = "max" # "max" (subscription inference) or "console" (API key creation)
# scopes = "org:create_api_key user:profile user:inference"
# Google (Gemini) — standard OAuth2 redirect flow.
# Requires creating OAuth credentials in Google Cloud Console:
# 1. Go to https://console.cloud.google.com/apis/credentials
# 2. Create an "OAuth 2.0 Client ID" (Desktop or Web app)
# 3. Add http://localhost:4200/auth/google/callback as an authorized redirect URI
# (Google does not accept .test TLDs — use localhost even if your dev host differs)
# 4. Enable the "Generative Language API" in APIs & Services
#
# callback_base_url overrides the host used when building OAuth redirect URIs.
# Required when your dev server runs on a .test or custom TLD that Google rejects.
#
# [auth]
# callback_base_url = "http://localhost:4200"
#
# [auth.google]
# client_id = "your-client-id.apps.googleusercontent.com"
# client_secret = "GOCSPX-..."
# scopes = "https://www.googleapis.com/auth/generative-language"
# gcp_project_id = "my-project-123456" # required with OAuth user credentials; find in Google Cloud Console > project selector
# OpenAI (ChatGPT Plus/Pro) — standard OAuth2 redirect flow via Codex backend.
# Uses the shared Codex CLI client ID by default; no client_secret needed.
# Targets chatgpt.com/backend-api (NOT api.openai.com). Requires an active
# ChatGPT Plus, Pro, or Teams subscription.
#
# [auth.openai]
# client_id = "app_EMoamEEZ73f0CkXaXp7hrann"
# scopes = "openid profile email offline_access"