Skip to content

Commit ddd5898

Browse files
authored
Fix: include usage tokens in streaming LLM calls to avoid inaccurate estimation
1 parent 065cb10 commit ddd5898

File tree

1 file changed

+4
-1
lines changed

1 file changed

+4
-1
lines changed

api/db/services/llm_service.py

Lines changed: 4 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -94,9 +94,12 @@ def __init__(self, tenant_id: str, model_config: dict, lang="Chinese",
9494

9595
def _log_usage(self, model_type: str, usage: LLMUsage):
9696
"""将本次 LLM 调用的 token 消耗写入明细日志表,失败只记日志不影响主流程。"""
97+
tenant_llm_id = self.model_config.get("id")
98+
if not tenant_llm_id:
99+
return
97100
LLMUsageLogService.create(
98101
tenant_id=self.tenant_id,
99-
tenant_llm_id=self.model_config["id"],
102+
tenant_llm_id=tenant_llm_id,
100103
model_type=model_type,
101104
user_id=self.user_id,
102105
biz_type=self.biz_type,

0 commit comments

Comments (0)