diff --git a/.env.example b/.env.example index 2e8e69ad..f9dd67fe 100644 --- a/.env.example +++ b/.env.example @@ -2,6 +2,11 @@ # 模型的API Key。 OPENAI_API_KEY=sk-... +# (可选) Gemini API Key。若未设置 OPENAI_API_KEY,将自动使用 Gemini OpenAI 兼容端点。 +# 推荐与 OPENAI_BASE_URL / OPENAI_MODEL_NAME 配合使用;不填时会自动回退到 +# https://generativelanguage.googleapis.com/v1beta/openai/ 和 gemini-2.5-flash +GEMINI_API_KEY= + # 模型的API接口地址。这里需要填写服务商提供的、兼容OpenAI格式的API地址 # 可查阅你使用的大模型API文档,如格式为 https://xx.xx.com/v1/chat/completions 则OPENAI_BASE_URL只需要填入前半段 https://xx.xx.com/v1/ OPENAI_BASE_URL=https://api-inference.modelscope.cn/v1/ @@ -77,4 +82,4 @@ WEBHOOK_CONTENT_TYPE="JSON" # GET请求的查询参数 (JSON格式, 支持 {{title}}, {{content}} 占位符) WEBHOOK_QUERY_PARAMETERS='{"title":"{{title}}","content":"{{content}}"}' # POST请求的请求体 (JSON格式, 支持 {{title}}, {{content}} 占位符) -WEBHOOK_BODY='{"title":"{{title}}","content":"{{content}}"}' \ No newline at end of file +WEBHOOK_BODY='{"title":"{{title}}","content":"{{content}}"}' diff --git a/README.md b/README.md index 15df1f79..ca9a6b73 100644 --- a/README.md +++ b/README.md @@ -47,6 +47,7 @@ cp .env.example .env | 变量 | 说明 | 必填 | |------|------|------| | `OPENAI_API_KEY` | AI 模型 API Key | 是 | +| `GEMINI_API_KEY` | Gemini API Key(当未设置 `OPENAI_API_KEY` 时可直接使用) | 否 | | `OPENAI_BASE_URL` | API 接口地址(兼容 OpenAI 格式) | 是 | | `OPENAI_MODEL_NAME` | 多模态模型名称(如 `gpt-4o`) | 是 | | `WEB_USERNAME` / `WEB_PASSWORD` | Web 界面登录凭据(默认 `admin` / `admin123`) | 否 | @@ -56,6 +57,9 @@ cp .env.example .env 完整配置项参考 `.env.example` +> 使用 `GEMINI_API_KEY` 时,若未手动填写 `OPENAI_BASE_URL` 和 `OPENAI_MODEL_NAME`,系统会自动使用 +> `https://generativelanguage.googleapis.com/v1beta/openai/` 与 `gemini-2.5-flash`。 + 3. 
**启动服务** ```bash diff --git a/src/api/routes/settings.py b/src/api/routes/settings.py index fee04783..f4f9a985 100644 --- a/src/api/routes/settings.py +++ b/src/api/routes/settings.py @@ -1,6 +1,7 @@ """ 设置管理路由 """ + import os from typing import Optional @@ -12,6 +13,8 @@ from src.infrastructure.config.env_manager import env_manager from src.infrastructure.config.settings import ( AISettings, + GEMINI_DEFAULT_MODEL_NAME, + GEMINI_OPENAI_COMPAT_BASE_URL, notification_settings, reload_settings, scraper_settings, @@ -21,10 +24,12 @@ router = APIRouter(prefix="/api/settings", tags=["settings"]) + def _reload_env() -> None: load_dotenv(dotenv_path=env_manager.env_file, override=True) reload_settings() + def _env_bool(key: str, default: bool = False) -> bool: value = env_manager.get_value(key) if value is None: @@ -48,6 +53,7 @@ def _normalize_bool_value(value: bool) -> str: class NotificationSettingsModel(BaseModel): """通知设置模型""" + NTFY_TOPIC_URL: Optional[str] = None GOTIFY_URL: Optional[str] = None GOTIFY_TOKEN: Optional[str] = None @@ -66,7 +72,9 @@ class NotificationSettingsModel(BaseModel): class AISettingsModel(BaseModel): """AI设置模型""" + OPENAI_API_KEY: Optional[str] = None + GEMINI_API_KEY: Optional[str] = None OPENAI_BASE_URL: Optional[str] = None OPENAI_MODEL_NAME: Optional[str] = None SKIP_AI_ANALYSIS: Optional[bool] = None @@ -120,6 +128,7 @@ async def update_notification_settings( return {"message": "通知设置已成功更新"} return {"message": "更新通知设置失败"} + @router.get("/rotation") async def get_rotation_settings(): return { @@ -162,6 +171,7 @@ async def get_system_status( # 检查关键环境变量是否设置 openai_api_key = env_manager.get_value("OPENAI_API_KEY", "") + gemini_api_key = env_manager.get_value("GEMINI_API_KEY", "") openai_base_url = env_manager.get_value("OPENAI_BASE_URL", "") openai_model_name = env_manager.get_value("OPENAI_MODEL_NAME", "") ntfy_topic_url = env_manager.get_value("NTFY_TOPIC_URL", "") @@ -180,35 +190,37 @@ async def get_system_status( "running_in_docker": 
scraper_settings.running_in_docker, "scraper_running": len(running_task_ids) > 0, "running_task_ids": running_task_ids, - "login_state_file": { - "exists": login_state_exists, - "path": state_file - }, + "login_state_file": {"exists": login_state_exists, "path": state_file}, "env_file": { "exists": env_file_exists, "openai_api_key_set": bool(openai_api_key), + "gemini_api_key_set": bool(gemini_api_key), "openai_base_url_set": bool(openai_base_url), "openai_model_name_set": bool(openai_model_name), - "ntfy_topic_url_set": bool(ntfy_topic_url) - } + "ntfy_topic_url_set": bool(ntfy_topic_url), + }, } -class AISettingsModel(BaseModel): - """AI设置模型""" - OPENAI_API_KEY: Optional[str] = None - OPENAI_BASE_URL: Optional[str] = None - OPENAI_MODEL_NAME: Optional[str] = None - SKIP_AI_ANALYSIS: Optional[bool] = None - - @router.get("/ai") async def get_ai_settings(): """获取AI设置""" + openai_api_key = env_manager.get_value("OPENAI_API_KEY", "") + gemini_api_key = env_manager.get_value("GEMINI_API_KEY", "") + openai_base_url = env_manager.get_value("OPENAI_BASE_URL", "") + openai_model_name = env_manager.get_value("OPENAI_MODEL_NAME", "") + + if not openai_base_url and gemini_api_key and not openai_api_key: + openai_base_url = GEMINI_OPENAI_COMPAT_BASE_URL + + if not openai_model_name and gemini_api_key and not openai_api_key: + openai_model_name = GEMINI_DEFAULT_MODEL_NAME + return { - "OPENAI_BASE_URL": env_manager.get_value("OPENAI_BASE_URL", ""), - "OPENAI_MODEL_NAME": env_manager.get_value("OPENAI_MODEL_NAME", ""), - "SKIP_AI_ANALYSIS": env_manager.get_value("SKIP_AI_ANALYSIS", "false").lower() == "true" + "OPENAI_BASE_URL": openai_base_url, + "OPENAI_MODEL_NAME": openai_model_name, + "SKIP_AI_ANALYSIS": str(env_manager.get_value("SKIP_AI_ANALYSIS", "false")).lower() + == "true", } @@ -218,8 +230,13 @@ async def update_ai_settings( ): """更新AI设置""" updates = {} + current_openai_api_key = env_manager.get_value("OPENAI_API_KEY", "") + current_gemini_api_key = 
env_manager.get_value("GEMINI_API_KEY", "") + if settings.OPENAI_API_KEY is not None: updates["OPENAI_API_KEY"] = settings.OPENAI_API_KEY + if settings.GEMINI_API_KEY is not None: + updates["GEMINI_API_KEY"] = settings.GEMINI_API_KEY if settings.OPENAI_BASE_URL is not None: updates["OPENAI_BASE_URL"] = settings.OPENAI_BASE_URL if settings.OPENAI_MODEL_NAME is not None: @@ -227,6 +244,22 @@ async def update_ai_settings( if settings.SKIP_AI_ANALYSIS is not None: updates["SKIP_AI_ANALYSIS"] = str(settings.SKIP_AI_ANALYSIS).lower() + submitted_openai_api_key = ( + settings.OPENAI_API_KEY if settings.OPENAI_API_KEY is not None else current_openai_api_key + ) + submitted_gemini_api_key = ( + settings.GEMINI_API_KEY if settings.GEMINI_API_KEY is not None else current_gemini_api_key + ) + + use_gemini_defaults = bool(submitted_gemini_api_key and not submitted_openai_api_key) + if use_gemini_defaults: + if settings.OPENAI_BASE_URL is None and not env_manager.get_value("OPENAI_BASE_URL", ""): + updates["OPENAI_BASE_URL"] = GEMINI_OPENAI_COMPAT_BASE_URL + if settings.OPENAI_MODEL_NAME is None and not env_manager.get_value( + "OPENAI_MODEL_NAME", "" + ): + updates["OPENAI_MODEL_NAME"] = GEMINI_DEFAULT_MODEL_NAME + success = env_manager.update_values(updates) if success: _reload_env() @@ -244,13 +277,49 @@ async def test_ai_settings( import httpx stored_api_key = env_manager.get_value("OPENAI_API_KEY", "") + stored_gemini_api_key = env_manager.get_value("GEMINI_API_KEY", "") submitted_api_key = settings.get("OPENAI_API_KEY", "") - api_key = submitted_api_key or stored_api_key + submitted_gemini_api_key = settings.get("GEMINI_API_KEY", "") + + submitted_base_url = settings.get("OPENAI_BASE_URL", "") + submitted_model_name = settings.get("OPENAI_MODEL_NAME", "") + stored_base_url = env_manager.get_value("OPENAI_BASE_URL", "") + stored_model_name = env_manager.get_value("OPENAI_MODEL_NAME", "") + submitted_or_stored_openai = submitted_api_key or stored_api_key + 
submitted_or_stored_gemini = submitted_gemini_api_key or stored_gemini_api_key + + resolved_base_url = submitted_base_url or stored_base_url + if not resolved_base_url and submitted_or_stored_gemini and not submitted_or_stored_openai: + resolved_base_url = GEMINI_OPENAI_COMPAT_BASE_URL + + resolved_model_name = submitted_model_name or stored_model_name + if ( + not resolved_model_name + and submitted_or_stored_gemini + and not submitted_or_stored_openai + ): + resolved_model_name = GEMINI_DEFAULT_MODEL_NAME + + is_gemini_endpoint = "generativelanguage.googleapis.com" in resolved_base_url + if is_gemini_endpoint: + api_key = ( + submitted_gemini_api_key + or stored_gemini_api_key + or submitted_api_key + or stored_api_key + ) + else: + api_key = ( + submitted_api_key + or stored_api_key + or submitted_gemini_api_key + or stored_gemini_api_key + ) # 创建OpenAI客户端 client_params = { "api_key": api_key, - "base_url": settings.get("OPENAI_BASE_URL", ""), + "base_url": resolved_base_url, "timeout": httpx.Timeout(30.0), } @@ -259,7 +328,7 @@ async def test_ai_settings( if proxy_url: client_params["http_client"] = httpx.Client(proxy=proxy_url) - model_name = settings.get("OPENAI_MODEL_NAME", "") + model_name = resolved_model_name print(f"AI测试 - BASE_URL: {client_params['base_url']}, MODEL: {model_name}") client = OpenAI(**client_params) @@ -267,19 +336,14 @@ async def test_ai_settings( # 测试连接 response = client.chat.completions.create( model=model_name, - messages=[ - {"role": "user", "content": "Hello, this is a test message."} - ], - max_tokens=10 + messages=[{"role": "user", "content": "Hello, this is a test message."}], + max_tokens=10, ) return { "success": True, "message": "AI模型连接测试成功!", - "response": response.choices[0].message.content if response.choices else "No response" + "response": response.choices[0].message.content if response.choices else "No response", } except Exception as e: - return { - "success": False, - "message": f"AI模型连接测试失败: {str(e)}" - } + return 
{"success": False, "message": f"AI模型连接测试失败: {str(e)}"} diff --git a/src/config.py b/src/config.py index df56b7cb..9b5ffbf2 100644 --- a/src/config.py +++ b/src/config.py @@ -1,5 +1,6 @@ import os import sys +from typing import Mapping, Optional from dotenv import load_dotenv from openai import AsyncOpenAI @@ -21,9 +22,41 @@ DETAIL_API_URL_PATTERN = "h5api.m.goofish.com/h5/mtop.taobao.idle.pc.detail" # --- Environment Variables --- -API_KEY = os.getenv("OPENAI_API_KEY") -BASE_URL = os.getenv("OPENAI_BASE_URL") -MODEL_NAME = os.getenv("OPENAI_MODEL_NAME") +GEMINI_OPENAI_COMPAT_BASE_URL = "https://generativelanguage.googleapis.com/v1beta/openai/" +GEMINI_DEFAULT_MODEL_NAME = "gemini-2.5-flash" + + +def resolve_ai_runtime_config(env: Optional[Mapping[str, str]] = None) -> dict: + source = env or os.environ + + openai_api_key = source.get("OPENAI_API_KEY") + gemini_api_key = source.get("GEMINI_API_KEY") + base_url = source.get("OPENAI_BASE_URL") or "" + model_name = source.get("OPENAI_MODEL_NAME") or "" + + use_gemini_defaults = bool(gemini_api_key and not openai_api_key) + resolved_base_url = base_url or (GEMINI_OPENAI_COMPAT_BASE_URL if use_gemini_defaults else "") + resolved_model_name = model_name or (GEMINI_DEFAULT_MODEL_NAME if use_gemini_defaults else "") + is_gemini_endpoint = "generativelanguage.googleapis.com" in resolved_base_url + + if is_gemini_endpoint: + resolved_api_key = gemini_api_key or openai_api_key + else: + resolved_api_key = openai_api_key or gemini_api_key + + return { + "api_key": resolved_api_key, + "base_url": resolved_base_url, + "model_name": resolved_model_name, + "is_gemini_openai_compat": use_gemini_defaults or is_gemini_endpoint, + } + + +_ai_runtime = resolve_ai_runtime_config() + +API_KEY = _ai_runtime["api_key"] +BASE_URL = _ai_runtime["base_url"] +MODEL_NAME = _ai_runtime["model_name"] PROXY_URL = os.getenv("PROXY_URL") NTFY_TOPIC_URL = os.getenv("NTFY_TOPIC_URL") GOTIFY_URL = os.getenv("GOTIFY_URL") @@ -47,27 +80,36 @@ 
ENABLE_THINKING = os.getenv("ENABLE_THINKING", "false").lower() == "true" ENABLE_RESPONSE_FORMAT = os.getenv("ENABLE_RESPONSE_FORMAT", "true").lower() == "true" +# Allow running without any saved login state cookies. +# This will likely be more fragile and may trigger risk-control pages more often. +ALLOW_GUEST_MODE = os.getenv("ALLOW_GUEST_MODE", "false").lower() == "true" + # --- Headers --- IMAGE_DOWNLOAD_HEADERS = { - 'User-Agent': 'Mozilla/5.0 (Windows NT 10.0; Win64; x64; rv:139.0) Gecko/20100101 Firefox/139.0', - 'Accept': 'image/avif,image/webp,image/apng,image/svg+xml,image/*,*/*;q=0.8', - 'Accept-Language': 'zh-CN,zh;q=0.9,en;q=0.8', - 'Connection': 'keep-alive', - 'Upgrade-Insecure-Requests': '1', + "User-Agent": "Mozilla/5.0 (Windows NT 10.0; Win64; x64; rv:139.0) Gecko/20100101 Firefox/139.0", + "Accept": "image/avif,image/webp,image/apng,image/svg+xml,image/*,*/*;q=0.8", + "Accept-Language": "zh-CN,zh;q=0.9,en;q=0.8", + "Connection": "keep-alive", + "Upgrade-Insecure-Requests": "1", } # --- Client Initialization --- # 检查配置是否齐全 if not all([BASE_URL, MODEL_NAME]): - print("警告:未在 .env 文件中完整设置 OPENAI_BASE_URL 和 OPENAI_MODEL_NAME。AI相关功能可能无法使用。") + print( + "警告:未在 .env 文件中完整设置 OPENAI_BASE_URL 和 OPENAI_MODEL_NAME。AI相关功能可能无法使用。" + ) client = None else: try: + if _ai_runtime["is_gemini_openai_compat"]: + print("检测到 GEMINI_API_KEY,已自动启用 Gemini OpenAI 兼容端点。") + if PROXY_URL: print(f"正在为AI请求使用HTTP/S代理: {PROXY_URL}") # httpx 会自动从环境变量中读取代理设置 - os.environ['HTTP_PROXY'] = PROXY_URL - os.environ['HTTPS_PROXY'] = PROXY_URL + os.environ["HTTP_PROXY"] = PROXY_URL + os.environ["HTTPS_PROXY"] = PROXY_URL # openai 客户端内部的 httpx 会自动从环境变量中获取代理配置 client = AsyncOpenAI(api_key=API_KEY, base_url=BASE_URL) @@ -83,8 +125,12 @@ pass # 检查关键配置 -if not all([BASE_URL, MODEL_NAME]) and 'prompt_generator.py' in sys.argv[0]: - sys.exit("错误:请确保在 .env 文件中完整设置了 OPENAI_BASE_URL 和 OPENAI_MODEL_NAME。(OPENAI_API_KEY 对于某些服务是可选的)") +if not all([BASE_URL, MODEL_NAME]) and "prompt_generator.py" in 
sys.argv[0]: + sys.exit( + "错误:请确保在 .env 文件中完整设置了 OPENAI_BASE_URL 和 OPENAI_MODEL_NAME。" + "如果使用 Gemini API Key,可仅设置 GEMINI_API_KEY 并让系统自动填充兼容端点。" + ) + def get_ai_request_params(**kwargs): """ @@ -92,9 +138,9 @@ def get_ai_request_params(**kwargs): """ if ENABLE_THINKING: kwargs["extra_body"] = {"enable_thinking": False} - + # 如果禁用response_format,则移除该参数 if not ENABLE_RESPONSE_FORMAT and "response_format" in kwargs: del kwargs["response_format"] - + return kwargs diff --git a/src/infrastructure/config/settings.py b/src/infrastructure/config/settings.py index c7225437..84f3d42b 100644 --- a/src/infrastructure/config/settings.py +++ b/src/infrastructure/config/settings.py @@ -2,17 +2,24 @@ 统一配置管理模块 使用 Pydantic 进行类型安全的配置管理 """ + try: from pydantic_settings import BaseSettings, SettingsConfigDict + _USING_PYDANTIC_SETTINGS = True except ImportError: from pydantic import BaseSettings + _USING_PYDANTIC_SETTINGS = False from pydantic import Field from typing import Optional import os +GEMINI_OPENAI_COMPAT_BASE_URL = "https://generativelanguage.googleapis.com/v1beta/openai/" +GEMINI_DEFAULT_MODEL_NAME = "gemini-2.5-flash" + + def _env_field(default, env_name: str, **kwargs): if _USING_PYDANTIC_SETTINGS: return Field(default, validation_alias=env_name, **kwargs) @@ -20,6 +27,7 @@ def _env_field(default, env_name: str, **kwargs): if _USING_PYDANTIC_SETTINGS: + class _EnvSettings(BaseSettings): model_config = SettingsConfigDict( env_file=".env", @@ -28,6 +36,7 @@ class _EnvSettings(BaseSettings): protected_namespaces=(), ) else: + class _EnvSettings(BaseSettings): class Config: env_file = ".env" @@ -38,7 +47,9 @@ class Config: class AISettings(_EnvSettings): """AI模型配置""" + api_key: Optional[str] = _env_field(None, "OPENAI_API_KEY") + gemini_api_key: Optional[str] = _env_field(None, "GEMINI_API_KEY") base_url: str = _env_field("", "OPENAI_BASE_URL") model_name: str = _env_field("", "OPENAI_MODEL_NAME") proxy_url: Optional[str] = _env_field(None, "PROXY_URL") @@ -47,13 +58,35 @@ 
class AISettings(_EnvSettings): enable_thinking: bool = _env_field(False, "ENABLE_THINKING") skip_analysis: bool = _env_field(False, "SKIP_AI_ANALYSIS") + def resolved_api_key(self) -> Optional[str]: + if "generativelanguage.googleapis.com" in self.resolved_base_url(): + return self.gemini_api_key or self.api_key + return self.api_key or self.gemini_api_key + + def resolved_base_url(self) -> str: + if self.base_url: + return self.base_url + if self.gemini_api_key and not self.api_key: + return GEMINI_OPENAI_COMPAT_BASE_URL + return "" + + def resolved_model_name(self) -> str: + if self.model_name: + return self.model_name + if self.gemini_api_key and not self.api_key: + return GEMINI_DEFAULT_MODEL_NAME + return "" + def is_configured(self) -> bool: """检查AI是否已正确配置""" - return bool(self.base_url and self.model_name) + return bool( + self.resolved_api_key() and self.resolved_base_url() and self.resolved_model_name() + ) class NotificationSettings(_EnvSettings): """通知服务配置""" + ntfy_topic_url: Optional[str] = _env_field(None, "NTFY_TOPIC_URL") gotify_url: Optional[str] = _env_field(None, "GOTIFY_URL") gotify_token: Optional[str] = _env_field(None, "GOTIFY_TOKEN") @@ -71,18 +104,21 @@ class NotificationSettings(_EnvSettings): def has_any_notification_enabled(self) -> bool: """检查是否配置了任何通知服务""" - return any([ - self.ntfy_topic_url, - self.wx_bot_url, - self.gotify_url and self.gotify_token, - self.bark_url, - self.telegram_bot_token and self.telegram_chat_id, - self.webhook_url - ]) + return any( + [ + self.ntfy_topic_url, + self.wx_bot_url, + self.gotify_url and self.gotify_token, + self.bark_url, + self.telegram_bot_token and self.telegram_chat_id, + self.webhook_url, + ] + ) class ScraperSettings(_EnvSettings): """爬虫相关配置""" + run_headless: bool = _env_field(True, "RUN_HEADLESS") login_is_edge: bool = _env_field(False, "LOGIN_IS_EDGE") running_in_docker: bool = _env_field(False, "RUNNING_IN_DOCKER") @@ -91,6 +127,7 @@ class ScraperSettings(_EnvSettings): class 
AppSettings(_EnvSettings): """应用主配置""" + server_port: int = _env_field(8000, "SERVER_PORT") web_username: str = _env_field("admin", "WEB_USERNAME") web_password: str = _env_field("admin123", "WEB_PASSWORD") @@ -109,6 +146,7 @@ def __init__(self, **kwargs): # 全局配置实例(单例模式) _settings_instance = None + def get_settings() -> AppSettings: """获取全局配置实例""" global _settings_instance diff --git a/tests/unit/test_ai_runtime_config.py b/tests/unit/test_ai_runtime_config.py new file mode 100644 index 00000000..72461c02 --- /dev/null +++ b/tests/unit/test_ai_runtime_config.py @@ -0,0 +1,65 @@ +from src.config import ( + GEMINI_DEFAULT_MODEL_NAME, + GEMINI_OPENAI_COMPAT_BASE_URL, + resolve_ai_runtime_config, +) + + +def test_resolve_ai_runtime_prefers_openai_key_when_present(): + runtime = resolve_ai_runtime_config( + { + "OPENAI_API_KEY": "openai-key", + "GEMINI_API_KEY": "gemini-key", + "OPENAI_BASE_URL": "https://example.com/v1/", + "OPENAI_MODEL_NAME": "custom-model", + } + ) + + assert runtime["api_key"] == "openai-key" + assert runtime["base_url"] == "https://example.com/v1/" + assert runtime["model_name"] == "custom-model" + assert runtime["is_gemini_openai_compat"] is False + + +def test_resolve_ai_runtime_uses_gemini_defaults_with_only_gemini_key(): + runtime = resolve_ai_runtime_config( + { + "GEMINI_API_KEY": "gemini-key", + "OPENAI_BASE_URL": "", + "OPENAI_MODEL_NAME": "", + } + ) + + assert runtime["api_key"] == "gemini-key" + assert runtime["base_url"] == GEMINI_OPENAI_COMPAT_BASE_URL + assert runtime["model_name"] == GEMINI_DEFAULT_MODEL_NAME + assert runtime["is_gemini_openai_compat"] is True + + +def test_resolve_ai_runtime_keeps_custom_base_and_model_for_gemini_key(): + runtime = resolve_ai_runtime_config( + { + "GEMINI_API_KEY": "gemini-key", + "OPENAI_BASE_URL": "https://gateway.example/v1/", + "OPENAI_MODEL_NAME": "gemini-2.5-flash", + } + ) + + assert runtime["api_key"] == "gemini-key" + assert runtime["base_url"] == "https://gateway.example/v1/" + assert 
runtime["model_name"] == "gemini-2.5-flash" + assert runtime["is_gemini_openai_compat"] is True + + +def test_resolve_ai_runtime_prefers_gemini_key_for_gemini_endpoint(): + runtime = resolve_ai_runtime_config( + { + "OPENAI_API_KEY": "openai-key", + "GEMINI_API_KEY": "gemini-key", + "OPENAI_BASE_URL": GEMINI_OPENAI_COMPAT_BASE_URL, + "OPENAI_MODEL_NAME": "gemini-2.5-flash", + } + ) + + assert runtime["api_key"] == "gemini-key" + assert runtime["is_gemini_openai_compat"] is True diff --git a/web-ui/src/api/settings.ts b/web-ui/src/api/settings.ts index 886036b4..46c35eb8 100644 --- a/web-ui/src/api/settings.ts +++ b/web-ui/src/api/settings.ts @@ -19,6 +19,7 @@ export interface NotificationSettings { export interface AiSettings { OPENAI_API_KEY?: string + GEMINI_API_KEY?: string OPENAI_BASE_URL?: string OPENAI_MODEL_NAME?: string PROXY_URL?: string @@ -46,6 +47,7 @@ export interface SystemStatus { env_file: { exists: boolean openai_api_key_set: boolean + gemini_api_key_set?: boolean openai_base_url_set: boolean openai_model_name_set: boolean ntfy_topic_url_set: boolean diff --git a/web-ui/src/composables/useSettings.ts b/web-ui/src/composables/useSettings.ts index 124faa85..eec1dcba 100644 --- a/web-ui/src/composables/useSettings.ts +++ b/web-ui/src/composables/useSettings.ts @@ -66,16 +66,27 @@ export function useSettings() { isSaving.value = true try { const payload = { ...aiSettings.value } - const apiKey = (payload.OPENAI_API_KEY || '').trim() - if (apiKey) { - payload.OPENAI_API_KEY = apiKey + const openaiApiKey = (payload.OPENAI_API_KEY || '').trim() + if (openaiApiKey) { + payload.OPENAI_API_KEY = openaiApiKey } else { delete payload.OPENAI_API_KEY } + + const geminiApiKey = (payload.GEMINI_API_KEY || '').trim() + if (geminiApiKey) { + payload.GEMINI_API_KEY = geminiApiKey + } else { + delete payload.GEMINI_API_KEY + } + await settingsApi.updateAiSettings(payload) if (aiSettings.value.OPENAI_API_KEY) { aiSettings.value.OPENAI_API_KEY = '' } + if 
(aiSettings.value.GEMINI_API_KEY) { + aiSettings.value.GEMINI_API_KEY = '' + } // Refresh status systemStatus.value = await settingsApi.getSystemStatus() } catch (e) { @@ -102,12 +113,20 @@ export function useSettings() { isSaving.value = true try { const payload = { ...aiSettings.value } - const apiKey = (payload.OPENAI_API_KEY || '').trim() - if (apiKey) { - payload.OPENAI_API_KEY = apiKey + const openaiApiKey = (payload.OPENAI_API_KEY || '').trim() + if (openaiApiKey) { + payload.OPENAI_API_KEY = openaiApiKey } else { delete payload.OPENAI_API_KEY } + + const geminiApiKey = (payload.GEMINI_API_KEY || '').trim() + if (geminiApiKey) { + payload.GEMINI_API_KEY = geminiApiKey + } else { + delete payload.GEMINI_API_KEY + } + const res = await settingsApi.testAiSettings(payload) return res } catch (e) { diff --git a/web-ui/src/views/SettingsView.vue b/web-ui/src/views/SettingsView.vue index fc2ca139..a328ba30 100644 --- a/web-ui/src/views/SettingsView.vue +++ b/web-ui/src/views/SettingsView.vue @@ -13,6 +13,9 @@ import { Select, SelectContent, SelectItem, SelectTrigger, SelectValue } from '@ import { toast } from '@/components/ui/toast' import { getPromptContent, listPrompts, updatePrompt } from '@/api/prompts' +const GEMINI_OPENAI_COMPAT_BASE_URL = 'https://generativelanguage.googleapis.com/v1beta/openai/' +const GEMINI_DEFAULT_MODEL_NAME = 'gemini-2.5-flash' + const { notificationSettings, aiSettings, @@ -84,6 +87,16 @@ async function handleTestAi() { } } +function applyGeminiDefaults() { + if (!aiSettings.value.OPENAI_BASE_URL) { + aiSettings.value.OPENAI_BASE_URL = GEMINI_OPENAI_COMPAT_BASE_URL + } + + if (!aiSettings.value.OPENAI_MODEL_NAME) { + aiSettings.value.OPENAI_MODEL_NAME = GEMINI_DEFAULT_MODEL_NAME + } +} + async function fetchPrompts() { isPromptLoading.value = true promptError.value = null @@ -200,6 +213,17 @@ watch(selectedPrompt, async (value) => { {{ systemStatus?.env_file.openai_api_key_set ? '已配置' : '未配置' }},为安全起见不回显。
++ {{ systemStatus?.env_file.gemini_api_key_set ? '已配置' : '未配置' }},仅在未设置 OPENAI_API_KEY 时生效。 +
+