-
Notifications
You must be signed in to change notification settings - Fork 1
Expand file tree
/
Copy pathllm_provider.py
More file actions
151 lines (125 loc) · 4.99 KB
/
llm_provider.py
File metadata and controls
151 lines (125 loc) · 4.99 KB
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
from dataclasses import dataclass
from typing import Optional
import os
from dotenv import load_dotenv
load_dotenv()
@dataclass
class LLMConfig:
    """Settings required to talk to one LLM provider.

    Attributes:
        provider: Provider identifier, e.g. "openai" or "deepseek".
        model_name: Model to request from the provider.
        api_key: Secret used to authenticate API calls.
        base_url: Endpoint override; None means the client's default URL.
        temperature: Sampling temperature forwarded to the model.
    """
    provider: str
    model_name: str
    api_key: str
    base_url: Optional[str] = None
    temperature: float = 0.3

    def __post_init__(self) -> None:
        """Reject a configuration whose API key is empty or missing.

        Raises:
            ValueError: If ``api_key`` is falsy (empty string or None).
        """
        # NOTE(review): the message assumes the env var is named
        # "<PROVIDER>_API_KEY"; for gemini the actual var is
        # GOOGLE_API_KEY — confirm whether this path can be hit there.
        if self.api_key:
            return
        raise ValueError(f"{self.provider.upper()}_API_KEY not found in environment variables")
class LLMProviderProcessor:
"""
Centralized processor for LLM provider configurations.
Handles configuration for OpenAI, DeepSeek, OpenRouter, and Gemini providers.
Reads from environment variables and provides a unified interface.
"""
# Provider-specific defaults
PROVIDER_DEFAULTS = {
"openai": {
"model_name": "gpt-4o-mini",
"base_url": None,
"api_key_env": "OPENAI_API_KEY"
},
"deepseek": {
"model_name": "deepseek-chat",
"base_url": "https://api.deepseek.com",
"api_key_env": "DEEPSEEK_API_KEY"
},
"openrouter": {
"model_name": "alibaba/tongyi-deepresearch-30b-a3b:free",
"base_url": "https://openrouter.ai/api/v1",
"api_key_env": "OPENROUTER_API_KEY"
},
"gemini": {
"model_name": "gemini-2.5-flash",
"base_url": "https://generativelanguage.googleapis.com/v1beta/openai/",
"api_key_env": "GOOGLE_API_KEY"
}
}
@classmethod
def get_config(
cls,
provider: Optional[str] = None,
model_name: Optional[str] = None,
temperature: float = 0.3
) -> LLMConfig:
"""
Get LLM configuration for the specified provider.
Args:
provider: Provider name ("openai", "deepseek", "openrouter", "gemini").
If None, reads from LLM_PROVIDER env variable.
model_name: Model name to use. If None, uses provider default.
temperature: Temperature for generation.
Returns:
LLMConfig object with all necessary configuration.
Raises:
ValueError: If provider is invalid or API key is missing.
"""
# Determine provider
if provider is None:
provider = os.getenv("LLM_PROVIDER", "openai").lower()
else:
provider = provider.lower()
# Validate provider
if provider not in cls.PROVIDER_DEFAULTS:
valid_providers = ", ".join(cls.PROVIDER_DEFAULTS.keys())
raise ValueError(
f"Invalid provider '{provider}'. "
f"Valid options: {valid_providers}"
)
# Get provider defaults
defaults = cls.PROVIDER_DEFAULTS[provider]
# Determine model name
if model_name is None:
model_name = defaults["model_name"]
# Get API key
api_key_env = defaults["api_key_env"]
api_key = os.getenv(api_key_env)
if not api_key:
raise ValueError(
f"API key not found. Please set {api_key_env} in your .env file"
)
# Create and return config
config = LLMConfig(
provider=provider,
model_name=model_name,
api_key=api_key,
base_url=defaults["base_url"],
temperature=temperature
)
return config
@classmethod
def get_available_providers(cls) -> list[str]:
"""Get list of available provider names."""
return list(cls.PROVIDER_DEFAULTS.keys())
@classmethod
def get_default_model(cls, provider: str) -> str:
"""Get default model name for a provider."""
provider = provider.lower()
if provider not in cls.PROVIDER_DEFAULTS:
raise ValueError(f"Unknown provider: {provider}")
return cls.PROVIDER_DEFAULTS[provider]["model_name"]
if __name__ == "__main__":
    # Smoke-test the processor against whatever API keys the environment has.
    print("Testing LLM Provider Processor\n")

    print(f"Available providers: {LLMProviderProcessor.get_available_providers()}\n")

    # Each provider either prints its resolved config or the missing-key error.
    for name in ["openai", "deepseek", "openrouter", "gemini"]:
        try:
            cfg = LLMProviderProcessor.get_config(provider=name)
        except ValueError as err:
            print(f"✗ {name.upper()}: {err}\n")
        else:
            # Mask the key: fixed asterisks plus the last four characters.
            tail = cfg.api_key[-4:] if len(cfg.api_key) > 4 else '****'
            print(f"✓ {name.upper()} Configuration:")
            print(f" Model: {cfg.model_name}")
            print(f" Base URL: {cfg.base_url or 'Default'}")
            print(f" API Key: {'*' * 10}{tail}")
            print()