Skip to content

Commit e28ee35

Browse files
author
sunminghui
committed
启动api时检查配置项
1 parent 28ad557 commit e28ee35

File tree

4 files changed

+86
-1
lines changed

4 files changed

+86
-1
lines changed

api.py

+4
Original file line numberDiff line numberDiff line change
@@ -18,9 +18,12 @@
1818
from biz.utils.queue import handle_queue
1919
from biz.utils.reporter import Reporter
2020

21+
from biz.utils.config_checker import check_config
2122
load_dotenv("conf/.env")
2223
api_app = Flask(__name__)
2324

25+
26+
2427
PUSH_REVIEW_ENABLED = os.environ.get('PUSH_REVIEW_ENABLED', '0') == '1'
2528

2629

@@ -196,6 +199,7 @@ def handle_gitlab_webhook(data):
196199
return jsonify(error_message), 400
197200

198201
if __name__ == '__main__':
202+
check_config()
199203
# 启动定时任务调度器
200204
setup_scheduler()
201205

biz/llm/client/base.py

+10
Original file line numberDiff line numberDiff line change
@@ -2,11 +2,21 @@
22
from typing import List, Dict, Optional
33

44
from biz.llm.types import NotGiven, NOT_GIVEN
5+
from biz.utils.log import logger
56

67

78
class BaseClient:
89
""" Base class for chat models client. """
910

11+
def ping(self) -> bool:
    """Probe the model to check connectivity.

    Sends a trivial prompt asking the model to answer exactly "ok".

    Returns:
        True only when the model replies exactly 'ok'; False on any
        other reply or on any raised exception.
    """
    try:
        # Ask the model to echo a fixed token; any deviation signals a problem.
        result = self.completions(messages=[{"role": "user", "content": '请仅返回 "ok"。'}])
        # Plain `==` always yields a real bool; the original
        # `result and result == 'ok'` could leak a falsy non-bool
        # (e.g. '' or None) to callers despite the -> bool annotation.
        return result == 'ok'
    except Exception as e:
        # Bug fix: the original message was not an f-string and `e` was
        # never bound, so it logged the literal text "{e}".
        logger.error(f"尝试连接LLM失败, {e}")
        return False
19+
1020
@abstractmethod
1121
def completions(self,
1222
messages: List[Dict[str, str]],

biz/llm/factory.py

-1
Original file line numberDiff line numberDiff line change
@@ -21,7 +21,6 @@ def getClient(provider: str = None) -> BaseClient:
2121

2222
provider_func = chat_model_providers.get(provider)
2323
if provider_func:
24-
logger.info(f"Successfully loaded LLM provider: {provider}")
2524
return provider_func()
2625
else:
2726
raise Exception(f'Unknown chat model provider: {provider}')

biz/utils/config_checker.py

+72
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,72 @@
1+
import os
2+
3+
from dotenv import load_dotenv
4+
5+
from biz.llm.factory import Factory
6+
from biz.utils.log import logger
7+
8+
# Path of the environment-variable file to load at import time.
ENV_FILE_PATH = "conf/.env"
load_dotenv(ENV_FILE_PATH)


# Environment variables that must be present regardless of provider.
REQUIRED_ENV_VARS = [
    "LLM_PROVIDER",
]

# Allowed LLM providers.
LLM_PROVIDERS = {"zhipuai", "openai", "deepseek", "ollama"}

# Keys that must be configured for each provider.
LLM_REQUIRED_KEYS = {
    "zhipuai": ["ZHIPUAI_API_KEY", "ZHIPUAI_API_MODEL"],
    "openai": ["OPENAI_API_KEY", "OPENAI_API_MODEL"],
    "deepseek": ["DEEPSEEK_API_KEY", "DEEPSEEK_API_MODEL"],
    "ollama": ["OLLAMA_API_BASE_URL", "OLLAMA_API_MODEL"],
}
27+
28+
29+
def check_env_vars():
    """Log whether every variable in REQUIRED_ENV_VARS is set.

    Emits an info line when all are present, otherwise a warning
    listing the absent names. Never raises.
    """
    absent = [name for name in REQUIRED_ENV_VARS if name not in os.environ]
    if not absent:
        logger.info("所有必要的环境变量均已设置。")
    else:
        logger.warning(f"缺少环境变量: {', '.join(absent)}")
36+
37+
38+
def check_llm_provider():
    """Validate the configured LLM provider and its required settings.

    Logs an error (and stops) when LLM_PROVIDER is unset or not one of
    LLM_PROVIDERS; otherwise logs whether every key that provider
    requires is present. Never raises.
    """
    llm_provider = os.getenv("LLM_PROVIDER")

    # Guard clauses: nothing more to check without a valid provider.
    if not llm_provider:
        logger.error("LLM_PROVIDER 未设置!")
        return
    if llm_provider not in LLM_PROVIDERS:
        logger.error(f"LLM_PROVIDER 值错误,应为 {LLM_PROVIDERS} 之一。")
        return

    missing_keys = [
        key
        for key in LLM_REQUIRED_KEYS.get(llm_provider, [])
        if not os.getenv(key)
    ]
    if missing_keys:
        logger.error(f"当前 LLM 供应商为 {llm_provider},但缺少必要的环境变量: {', '.join(missing_keys)}")
    else:
        logger.info(f"LLM 供应商 {llm_provider} 的配置项已设置。")
57+
58+
def check_llm_connectivity():
    """Check that the configured LLM endpoint is actually reachable.

    Logs the outcome; never raises, so a misconfigured provider cannot
    abort the whole startup configuration check.
    """
    logger.info("正在检查 LLM 供应商的连接...")
    try:
        # Bug fix: Factory raises for an unknown or unset provider;
        # report that instead of letting the startup check crash.
        client = Factory().getClient()
    except Exception as e:
        logger.error(f"无法创建 LLM 客户端: {e}")
        return
    if client.ping():
        logger.info("LLM 可以连接成功。")
    else:
        logger.error("LLM连接可能有问题,请检查配置项。")
65+
66+
def check_config():
    """Entry point: run every configuration check in order.

    Runs the environment-variable, provider, and connectivity checks,
    logging a start and completion marker around them.
    """
    logger.info("开始检查配置项...")
    for check in (check_env_vars, check_llm_provider, check_llm_connectivity):
        check()
    logger.info("配置项检查完成。")

0 commit comments

Comments
 (0)